From c88e5ae2577dfb21a1ed4ee329081698338c7e78 Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 11:44:41 -0800 Subject: [PATCH 1/8] Add ktlint to gradle build --- build.gradle | 2 ++ 1 file changed, 2 insertions(+) diff --git a/build.gradle b/build.gradle index 5dd10c51c1..0b212d357b 100644 --- a/build.gradle +++ b/build.gradle @@ -16,10 +16,12 @@ plugins { id 'org.jetbrains.kotlin.jvm' version '1.4.0' apply false // https://arturbosch.github.io/detekt/groovydsl.html id "io.gitlab.arturbosch.detekt" version "1.20.0-RC1" apply false + id 'org.jlleitschuh.gradle.ktlint' version '10.2.1' } allprojects { apply plugin: 'jacoco' + apply plugin: 'org.jlleitschuh.gradle.ktlint' jacoco { toolVersion = '0.8.5' From c1f29dca5125931db7e7db9cf202018b35ffec9e Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 11:52:01 -0800 Subject: [PATCH 2/8] Resolve pts/../PartiQLPtsEvaluator.kt manual imports --- .../partiql/lang/pts/PartiQlPtsEvaluator.kt | 82 +++++++++++-------- 1 file changed, 49 insertions(+), 33 deletions(-) diff --git a/pts/test/org/partiql/lang/pts/PartiQlPtsEvaluator.kt b/pts/test/org/partiql/lang/pts/PartiQlPtsEvaluator.kt index df935fc909..373f970401 100644 --- a/pts/test/org/partiql/lang/pts/PartiQlPtsEvaluator.kt +++ b/pts/test/org/partiql/lang/pts/PartiQlPtsEvaluator.kt @@ -8,12 +8,20 @@ import org.partiql.lang.CompilerPipeline import org.partiql.lang.SqlException import org.partiql.lang.eval.EvaluationSession import org.partiql.lang.eval.ExprValue -import org.partiql.lang.eval.ExprValueType.* +import org.partiql.lang.eval.ExprValueType import org.partiql.lang.eval.name import org.partiql.lang.eval.stringValue -import org.partiql.testscript.compiler.* -import org.partiql.testscript.evaluator.* -import org.partiql.testscript.evaluator.TestFailure.FailureReason.* +import org.partiql.testscript.compiler.AppendedTestExpression +import org.partiql.testscript.compiler.ExpectedError +import org.partiql.testscript.compiler.ExpectedSuccess +import org.partiql.testscript.compiler.SkippedTestExpression +import org.partiql.testscript.compiler.TestExpression +import org.partiql.testscript.compiler.TestScriptExpression +import org.partiql.testscript.evaluator.Evaluator +import org.partiql.testscript.evaluator.PtsEquality +import org.partiql.testscript.evaluator.TestFailure +import org.partiql.testscript.evaluator.TestResult +import org.partiql.testscript.evaluator.TestResultSuccess /** * Reference implementation PTS Evaluator. 
@@ -25,61 +33,70 @@ class PartiQlPtsEvaluator(equality: PtsEquality) : Evaluator(equality) { private val compilerPipeline = CompilerPipeline.standard(ion) override fun evaluate(testExpressions: List<TestScriptExpression>): List<TestResult> = - testExpressions.map { - when (it) { - is SkippedTestExpression -> TestResultSuccess(it) - is TestExpression -> runTest(it) - // the reference implementation doesn't need any appended information currently - // to run a test - is AppendedTestExpression -> runTest(it.original) - } + testExpressions.map { + when (it) { + is SkippedTestExpression -> TestResultSuccess(it) + is TestExpression -> runTest(it) + // the reference implementation doesn't need any appended information currently + // to run a test + is AppendedTestExpression -> runTest(it.original) } + } private fun runTest(test: TestExpression): TestResult = try { // recreate the environment struct using the evaluator ion system val ionStruct = ion.newValue(ion.newReader(test.environment).apply { next() }) as IonStruct - - + val globals = compilerPipeline.valueFactory.newFromIonValue(ionStruct).bindings val session = EvaluationSession.build { globals(globals) } val expression = compilerPipeline.compile(test.statement) val actualResult = expression.eval(session).toPtsIon() verifyTestResult(test, actualResult) - } catch (e: SqlException) { when (test.expected) { is ExpectedError -> TestResultSuccess(test) - is ExpectedSuccess -> TestFailure(test, e.generateMessage(), UNEXPECTED_ERROR) + is ExpectedSuccess -> TestFailure(test, e.generateMessage(), TestFailure.FailureReason.UNEXPECTED_ERROR) } } private fun verifyTestResult(test: TestExpression, actualResult: IonValue): TestResult = - when (val expected = test.expected) { - is ExpectedError -> TestFailure(test, actualResult.toIonText(), EXPECTED_ERROR_NOT_THROWN) - is ExpectedSuccess -> { - if (equality.areEqual(expected.expected, actualResult)) { - TestResultSuccess(test) - } else { - TestFailure(test, actualResult.toIonText(), ACTUAL_DIFFERENT_THAN_EXPECTED) - } + when (val expected = test.expected) { + is ExpectedError -> TestFailure(test, actualResult.toIonText(), TestFailure.FailureReason.EXPECTED_ERROR_NOT_THROWN) + is ExpectedSuccess -> { + if (equality.areEqual(expected.expected, actualResult)) { + TestResultSuccess(test) + } else { + TestFailure(test, actualResult.toIonText(), TestFailure.FailureReason.ACTUAL_DIFFERENT_THAN_EXPECTED) } } - + } + private fun ExprValue.toPtsIon(): IonValue { fun <S : IonSequence> ExprValue.foldToIonSequence(initial: S): S = - this.fold(initial) { seq, el -> seq.apply { add(el.toPtsIon()) } } + this.fold(initial) { seq, el -> seq.apply { add(el.toPtsIon()) } } return when (this.type) { - MISSING -> missing - NULL, BOOL, INT, FLOAT, DECIMAL, DATE, TIME, TIMESTAMP, SYMBOL, STRING, CLOB, BLOB -> this.ionValue.clone() - LIST -> this.foldToIonSequence(ion.newEmptyList()) - SEXP -> this.foldToIonSequence(ion.newEmptySexp()) - STRUCT -> this.fold(ion.newEmptyStruct()) { struct, el -> + ExprValueType.MISSING -> missing + ExprValueType.NULL, + ExprValueType.BOOL, + ExprValueType.INT, + ExprValueType.FLOAT, + ExprValueType.DECIMAL, + ExprValueType.DATE, + ExprValueType.TIME, + ExprValueType.TIMESTAMP, + ExprValueType.SYMBOL, + ExprValueType.STRING, + ExprValueType.CLOB, + ExprValueType.BLOB -> this.ionValue.clone() + ExprValueType.LIST -> this.foldToIonSequence(ion.newEmptyList()) + ExprValueType.SEXP -> this.foldToIonSequence(ion.newEmptySexp()) + ExprValueType.STRUCT -> this.fold(ion.newEmptyStruct()) { struct, el -> struct.apply { add(el.name!!.stringValue(),
el.toPtsIon()) } } - BAG -> { + ExprValueType.BAG -> { val bag = ion.newEmptySexp().apply { add(ion.newSymbol("bag")) } this.foldToIonSequence(bag) @@ -94,4 +111,3 @@ private fun IonValue.toIonText(): String { return sb.toString() } - From df4f0b19c22e53c0693307115fa94ad9c5fa3da2 Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 12:06:02 -0800 Subject: [PATCH 3/8] Resolve examples/ manual imports --- .../partiql/examples/CsvExprValueExample.kt | 21 ++++--- .../examples/CustomFunctionsExample.kt | 61 +++++++++++-------- .../examples/EvaluationWithLazyBindings.kt | 13 ++-- .../partiql/examples/ParserErrorExample.kt | 18 +++--- .../org/partiql/examples/ParserExample.kt | 8 ++- .../org/partiql/examples/PreventJoinVistor.kt | 9 ++- .../org/partiql/examples/util/Example.kt | 12 ++-- .../kotlin/org/partiql/examples/util/Main.kt | 45 ++++++++------ 8 files changed, 108 insertions(+), 79 deletions(-) diff --git a/examples/src/kotlin/org/partiql/examples/CsvExprValueExample.kt b/examples/src/kotlin/org/partiql/examples/CsvExprValueExample.kt index 357fd4d345..cb1a5d5fd7 100644 --- a/examples/src/kotlin/org/partiql/examples/CsvExprValueExample.kt +++ b/examples/src/kotlin/org/partiql/examples/CsvExprValueExample.kt @@ -1,11 +1,17 @@ package org.partiql.examples -import com.amazon.ion.* -import com.amazon.ion.system.* +import com.amazon.ion.IonValue +import com.amazon.ion.system.IonSystemBuilder import org.partiql.examples.util.Example -import org.partiql.lang.* -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import org.partiql.lang.CompilerPipeline +import org.partiql.lang.eval.BaseExprValue +import org.partiql.lang.eval.Bindings +import org.partiql.lang.eval.EvaluationSession +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.namedValue +import org.partiql.lang.eval.stringValue import java.io.PrintStream /** @@ -16,7 +22,7 @@ import java.io.PrintStream * * The first column in the row will be assigned the name `_1`, the second `_2` and so on. */ -private class CsvRowExprValue(private val valueFactory: ExprValueFactory, private val rowString: String): BaseExprValue() { +private class CsvRowExprValue(private val valueFactory: ExprValueFactory, private val rowString: String) : BaseExprValue() { /** The Ion type that CsvRowExprValue is must similar to is a struct. 
*/ override val type: ExprValueType get() = ExprValueType.STRUCT @@ -88,7 +94,8 @@ class CsvExprValueExample(out: PrintStream) : Example(out) { .filter { it.isNotEmpty() } .map { CsvRowExprValue(pipeline.valueFactory, it) - }) + } + ) } } diff --git a/examples/src/kotlin/org/partiql/examples/CustomFunctionsExample.kt b/examples/src/kotlin/org/partiql/examples/CustomFunctionsExample.kt index 888bd7f0ce..8eeb3785b2 100644 --- a/examples/src/kotlin/org/partiql/examples/CustomFunctionsExample.kt +++ b/examples/src/kotlin/org/partiql/examples/CustomFunctionsExample.kt @@ -1,10 +1,18 @@ package org.partiql.examples -import com.amazon.ion.system.* +import com.amazon.ion.system.IonSystemBuilder import org.partiql.examples.util.Example -import org.partiql.lang.* +import org.partiql.lang.CompilerPipeline import org.partiql.lang.errors.ErrorCode -import org.partiql.lang.eval.* +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.eval.EvaluationSession +import org.partiql.lang.eval.ExprFunction +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.StructOrdering +import org.partiql.lang.eval.namedValue import org.partiql.lang.types.FunctionSignature import org.partiql.lang.types.StaticType import java.io.PrintStream @@ -61,7 +69,7 @@ class FibScalarExprFunc(private val valueFactory: ExprValueFactory) : ExprFuncti * fashion demonstrates how one could implement what would be known as a table-valued * function in a traditional SQL implementation. */ -class FibListExprFunc(private val valueFactory: ExprValueFactory): ExprFunction { +class FibListExprFunc(private val valueFactory: ExprValueFactory) : ExprFunction { override val signature = FunctionSignature( name = "fib_list", requiredParameters = listOf(StaticType.INT), @@ -115,27 +123,28 @@ class CustomFunctionsExample(out: PrintStream) : Example(out) { override fun run() { listOf( - "fib_scalar(NULL)", - "fib_scalar(MISSING)", - "fib_scalar(0)", - "fib_scalar(1)", - "fib_scalar(2)", - "fib_scalar(3)", - "fib_scalar(4)", - "fib_scalar(5)", - "fib_scalar(6)", - "fib_scalar(7)", - "fib_scalar(8)", - "fib_list(NULL)", - "fib_list(MISSING)", - "fib_list(0)", - "fib_list(1)", - "fib_list(2)", - "fib_list(3)", - "fib_list(4)", - "fib_list(5)", - "fib_list(6)", - "fib_list(7)", - "fib_list(8)").forEach { query -> print(query, eval(query)) } + "fib_scalar(NULL)", + "fib_scalar(MISSING)", + "fib_scalar(0)", + "fib_scalar(1)", + "fib_scalar(2)", + "fib_scalar(3)", + "fib_scalar(4)", + "fib_scalar(5)", + "fib_scalar(6)", + "fib_scalar(7)", + "fib_scalar(8)", + "fib_list(NULL)", + "fib_list(MISSING)", + "fib_list(0)", + "fib_list(1)", + "fib_list(2)", + "fib_list(3)", + "fib_list(4)", + "fib_list(5)", + "fib_list(6)", + "fib_list(7)", + "fib_list(8)" + ).forEach { query -> print(query, eval(query)) } } } diff --git a/examples/src/kotlin/org/partiql/examples/EvaluationWithLazyBindings.kt b/examples/src/kotlin/org/partiql/examples/EvaluationWithLazyBindings.kt index b3e8851462..95536a241f 100644 --- a/examples/src/kotlin/org/partiql/examples/EvaluationWithLazyBindings.kt +++ b/examples/src/kotlin/org/partiql/examples/EvaluationWithLazyBindings.kt @@ -1,9 +1,11 @@ package org.partiql.examples -import com.amazon.ion.system.* +import com.amazon.ion.system.IonSystemBuilder import org.partiql.examples.util.Example -import org.partiql.lang.* -import org.partiql.lang.eval.* +import 
org.partiql.lang.CompilerPipeline +import org.partiql.lang.eval.Bindings +import org.partiql.lang.eval.EvaluationSession +import org.partiql.lang.eval.ExprValue import java.io.PrintStream /** Demonstrates how to supply a global variable whose value is lazily materialized. */ @@ -25,7 +27,8 @@ class EvaluationWithLazyBindings(out: PrintStream) : Example(out) { // invoked to obtain its value, which will then be cached for later use. // "pets" is the PartiQL equivalent of a a "table" (actually an Ion list of structs) pipeline.valueFactory.newFromIonValue( - ion.singleValue("""[ { name: "Nibbler", age: 2 }, { name: "Hobbes", age: 6 } ]""")) + ion.singleValue("""[ { name: "Nibbler", age: 2 }, { name: "Hobbes", age: 6 } ]""") + ) } } print("global variables:", "pets => $ionText") @@ -37,4 +40,4 @@ class EvaluationWithLazyBindings(out: PrintStream) : Example(out) { val result = e.eval(session) print("result:", result) } -} \ No newline at end of file +} diff --git a/examples/src/kotlin/org/partiql/examples/ParserErrorExample.kt b/examples/src/kotlin/org/partiql/examples/ParserErrorExample.kt index 93ddeef2d9..abe85375e1 100644 --- a/examples/src/kotlin/org/partiql/examples/ParserErrorExample.kt +++ b/examples/src/kotlin/org/partiql/examples/ParserErrorExample.kt @@ -1,11 +1,11 @@ package org.partiql.examples -import com.amazon.ion.* -import com.amazon.ion.system.* +import com.amazon.ion.system.IonSystemBuilder import org.partiql.examples.util.Example -import org.partiql.lang.ast.* -import org.partiql.lang.errors.* -import org.partiql.lang.syntax.* +import org.partiql.lang.errors.Property +import org.partiql.lang.syntax.Parser +import org.partiql.lang.syntax.ParserException +import org.partiql.lang.syntax.SqlParser import java.io.PrintStream /** @@ -31,10 +31,10 @@ class ParserErrorExample(out: PrintStream) : Example(out) { val errorContext = e.errorContext!! val errorInformation = "errorCode: ${e.errorCode}" + - "\nLINE_NUMBER: ${errorContext[Property.LINE_NUMBER]}" + - "\nCOLUMN_NUMBER: ${errorContext[Property.COLUMN_NUMBER]}" + - "\nTOKEN_TYPE: ${errorContext[Property.TOKEN_TYPE]}" + - "\nTOKEN_VALUE: ${errorContext[Property.TOKEN_VALUE]}" + "\nLINE_NUMBER: ${errorContext[Property.LINE_NUMBER]}" + + "\nCOLUMN_NUMBER: ${errorContext[Property.COLUMN_NUMBER]}" + + "\nTOKEN_TYPE: ${errorContext[Property.TOKEN_TYPE]}" + + "\nTOKEN_VALUE: ${errorContext[Property.TOKEN_VALUE]}" print("Error message:", e.generateMessage()) print("Error information:", errorInformation) } diff --git a/examples/src/kotlin/org/partiql/examples/ParserExample.kt b/examples/src/kotlin/org/partiql/examples/ParserExample.kt index b6643cb647..b644f607e5 100644 --- a/examples/src/kotlin/org/partiql/examples/ParserExample.kt +++ b/examples/src/kotlin/org/partiql/examples/ParserExample.kt @@ -1,9 +1,11 @@ package org.partiql.examples -import com.amazon.ion.system.* +import com.amazon.ion.system.IonSystemBuilder +import com.amazon.ion.system.IonTextWriterBuilder import org.partiql.examples.util.Example import org.partiql.lang.domains.PartiqlAst -import org.partiql.lang.syntax.* +import org.partiql.lang.syntax.Parser +import org.partiql.lang.syntax.SqlParser import java.io.PrintStream /** @@ -13,7 +15,7 @@ class ParserExample(out: PrintStream) : Example(out) { /** Demonstrates query parsing and SerDe. */ override fun run() { - /// A standard instance of [IonSystem], which is required by [SqlParser]. + // / A standard instance of [IonSystem], which is required by [SqlParser]. 
val ion = IonSystemBuilder.standard().build() // An instance of [SqlParser]. diff --git a/examples/src/kotlin/org/partiql/examples/PreventJoinVistor.kt b/examples/src/kotlin/org/partiql/examples/PreventJoinVistor.kt index 438914f79a..d57b27f83c 100644 --- a/examples/src/kotlin/org/partiql/examples/PreventJoinVistor.kt +++ b/examples/src/kotlin/org/partiql/examples/PreventJoinVistor.kt @@ -1,9 +1,9 @@ package org.partiql.examples -import com.amazon.ion.system.* +import com.amazon.ion.system.IonSystemBuilder import org.partiql.examples.util.Example import org.partiql.lang.domains.PartiqlAst -import org.partiql.lang.syntax.* +import org.partiql.lang.syntax.SqlParser import java.io.PrintStream /** The exception thrown by when a JOIN clause was detected. */ @@ -11,7 +11,7 @@ private class InvalidAstException(message: String) : RuntimeException(message) /** * Customers wishing to embed PartiQL into their application might wish to restrict the use of certain language - * features or provide custom semantic checking. One way of accomplishing that is by inspecting the AST, this + * features or provide custom semantic checking. One way of accomplishing that is by inspecting the AST, this * example shows how to prevent the use of any kind of JOIN clause. */ @@ -21,7 +21,7 @@ class PreventJoinVisitorExample(out: PrintStream) : Example(out) { private fun hasJoins(sql: String): Boolean = try { val ast = parser.parseAstStatement(sql) - object : PartiqlAst.Visitor(){ + object : PartiqlAst.Visitor() { override fun visitFromSourceJoin(node: PartiqlAst.FromSource.Join) { throw InvalidAstException("JOINs are prevented") } @@ -43,4 +43,3 @@ class PreventJoinVisitorExample(out: PrintStream) : Example(out) { print("Has joins:", hasJoins(queryWithJoin).toString()) } } - diff --git a/examples/src/kotlin/org/partiql/examples/util/Example.kt b/examples/src/kotlin/org/partiql/examples/util/Example.kt index e1b78aaf65..979ea1734a 100644 --- a/examples/src/kotlin/org/partiql/examples/util/Example.kt +++ b/examples/src/kotlin/org/partiql/examples/util/Example.kt @@ -1,20 +1,20 @@ package org.partiql.examples.util -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.util.ConfigurableExprValueFormatter import java.io.PrintStream abstract class Example(val out: PrintStream) { - abstract fun run(); - + abstract fun run() + private val formatter = ConfigurableExprValueFormatter.pretty fun print(label: String, value: ExprValue) { print(label, formatter.format(value)) } - + fun print(label: String, data: String) { out.println(label) out.println(" ${data.replace("\n", "\n ")}") } -} \ No newline at end of file +} diff --git a/examples/src/kotlin/org/partiql/examples/util/Main.kt b/examples/src/kotlin/org/partiql/examples/util/Main.kt index 74f60b7a80..307998b5fd 100644 --- a/examples/src/kotlin/org/partiql/examples/util/Main.kt +++ b/examples/src/kotlin/org/partiql/examples/util/Main.kt @@ -2,27 +2,37 @@ package org.partiql.examples.util -import org.partiql.examples.* +import org.partiql.examples.CSVJavaExample +import org.partiql.examples.CsvExprValueExample +import org.partiql.examples.CustomFunctionsExample +import org.partiql.examples.CustomProceduresExample +import org.partiql.examples.EvaluationWithBindings +import org.partiql.examples.EvaluationWithLazyBindings +import org.partiql.examples.ParserErrorExample +import org.partiql.examples.ParserExample +import org.partiql.examples.PartialEvaluationVisitorTransformExample +import 
org.partiql.examples.PreventJoinVisitorExample +import org.partiql.examples.S3Example +import org.partiql.examples.SimpleExpressionEvaluation import java.io.PrintStream import java.lang.RuntimeException - private val examples = mapOf( - // Java Examples - CSVJavaExample::class.java.simpleName to CSVJavaExample(System.out), - S3Example::class.java.simpleName to S3Example(System.out), - - // Kotlin Examples - CsvExprValueExample::class.java.simpleName to CsvExprValueExample(System.out), - CustomFunctionsExample::class.java.simpleName to CustomFunctionsExample(System.out), - CustomProceduresExample::class.java.simpleName to CustomProceduresExample(System.out), - EvaluationWithBindings::class.java.simpleName to EvaluationWithBindings(System.out), - EvaluationWithLazyBindings::class.java.simpleName to EvaluationWithLazyBindings(System.out), - ParserErrorExample::class.java.simpleName to ParserErrorExample(System.out), - ParserExample::class.java.simpleName to ParserExample(System.out), - PartialEvaluationVisitorTransformExample::class.java.simpleName to PartialEvaluationVisitorTransformExample(System.out), - PreventJoinVisitorExample::class.java.simpleName to PreventJoinVisitorExample(System.out), - SimpleExpressionEvaluation::class.java.simpleName to SimpleExpressionEvaluation(System.out) + // Java Examples + CSVJavaExample::class.java.simpleName to CSVJavaExample(System.out), + S3Example::class.java.simpleName to S3Example(System.out), + + // Kotlin Examples + CsvExprValueExample::class.java.simpleName to CsvExprValueExample(System.out), + CustomFunctionsExample::class.java.simpleName to CustomFunctionsExample(System.out), + CustomProceduresExample::class.java.simpleName to CustomProceduresExample(System.out), + EvaluationWithBindings::class.java.simpleName to EvaluationWithBindings(System.out), + EvaluationWithLazyBindings::class.java.simpleName to EvaluationWithLazyBindings(System.out), + ParserErrorExample::class.java.simpleName to ParserErrorExample(System.out), + ParserExample::class.java.simpleName to ParserExample(System.out), + PartialEvaluationVisitorTransformExample::class.java.simpleName to PartialEvaluationVisitorTransformExample(System.out), + PreventJoinVisitorExample::class.java.simpleName to PreventJoinVisitorExample(System.out), + SimpleExpressionEvaluation::class.java.simpleName to SimpleExpressionEvaluation(System.out) ) fun main(args: Array) { @@ -45,4 +55,3 @@ fun main(args: Array) { fun printHelp(out: PrintStream) { out.println("./gradlew :examples:run --args=\"<${examples.keys.joinToString("|")}>\"") } - From 4fb72d83b7b96004379654fc4dc8a805c904eebb Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 12:07:05 -0800 Subject: [PATCH 4/8] Remove Runnable.kt --- cli/src/org/partiql/cli/Runnable.kt | 33 ----------------------------- 1 file changed, 33 deletions(-) delete mode 100644 cli/src/org/partiql/cli/Runnable.kt diff --git a/cli/src/org/partiql/cli/Runnable.kt b/cli/src/org/partiql/cli/Runnable.kt deleted file mode 100644 index 316db7267a..0000000000 --- a/cli/src/org/partiql/cli/Runnable.kt +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright 2019 Amazon.com, Inc. or its affiliates. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at: - * - * http://aws.amazon.com/apache2.0/ - * - * or in the "license" file accompanying this file. 
This file is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific - * language governing permissions and limitations under the License. - */ - -package org.partiql.cli - -import com.amazon.ion.IonWriter -import org.partiql.lang.eval.ExprValue -import org.partiql.lang.eval.rangeOver - -internal abstract class SqlCommand { - abstract fun run() - - protected fun writeResult(result: ExprValue, writer: IonWriter): Int { - var itemCount = 0 - result.rangeOver().forEach { - it.ionValue.writeTo(writer) - itemCount++ - } - writer.flush() - return itemCount - } -} From 131022f6477c60850446d90174296ff9189246d2 Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 16:45:16 -0800 Subject: [PATCH 5/8] Resolve testscript/ manual imports --- .../partiql/testscript/compiler/Compiler.kt | 93 ++-- .../compiler/TestScriptExpression.kt | 42 +- .../evaluator/DefaultPtsEquality.kt | 89 ++-- .../testscript/extensions/Interpolation.kt | 67 +-- .../testscript/extensions/ListExtensions.kt | 6 +- .../testscript/junitRunner/PtsRunner.kt | 44 +- .../org/partiql/testscript/parser/Parser.kt | 63 ++- .../parser/ast/builders/BaseBuilder.kt | 12 +- .../parser/ast/builders/ForBuilder.kt | 88 ++-- .../parser/ast/builders/TestBuilder.kt | 23 +- .../partiql/testscript/parser/ParserTests.kt | 453 +++++++++++------- 11 files changed, 605 insertions(+), 375 deletions(-) diff --git a/testscript/src/org/partiql/testscript/compiler/Compiler.kt b/testscript/src/org/partiql/testscript/compiler/Compiler.kt index db5d1dd7b0..ef7f2c4403 100644 --- a/testscript/src/org/partiql/testscript/compiler/Compiler.kt +++ b/testscript/src/org/partiql/testscript/compiler/Compiler.kt @@ -1,11 +1,21 @@ package org.partiql.testscript.compiler -import com.amazon.ion.* +import com.amazon.ion.IonSexp +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonSystem +import com.amazon.ion.IonType import org.partiql.testscript.Failure import org.partiql.testscript.Result import org.partiql.testscript.Success import org.partiql.testscript.foldToResult -import org.partiql.testscript.parser.ast.* +import org.partiql.testscript.parser.ast.AppendTestNode +import org.partiql.testscript.parser.ast.FileSetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.InlineSetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.ModuleNode +import org.partiql.testscript.parser.ast.SetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.SkipListNode +import org.partiql.testscript.parser.ast.TestNode import java.io.File /** @@ -43,7 +53,7 @@ private class CompileEnvironment(var testEnvironment: IonStruct) { // this way if there are multiple appends to the same skipped test they // will be no-op regardless of order fun invokeDeferred(): List> = - skipListDeferred.union(appendDeferred).map { it.invoke() } + skipListDeferred.union(appendDeferred).map { it.invoke() } } /** @@ -82,8 +92,8 @@ class Compiler(val ion: IonSystem) { val deferredResults = compileEnvironment.invokeDeferred() val errors = results.union(deferredResults) - .filterIsInstance>() - .flatMap { it.errors } + .filterIsInstance>() + .flatMap { it.errors } if (errors.isNotEmpty()) { throw CompilerException(errors.map { it.toPtsError() }) @@ -96,8 +106,9 @@ class Compiler(val ion: IonSystem) { * Changes the current test environment affecting subsequent AST nodes until a new Module is started. 
*/ private fun compileSetDefaultEnvironmentNode( - compileEnvironment: CompileEnvironment, - node: SetDefaultEnvironmentNode): Result = when (node) { + compileEnvironment: CompileEnvironment, + node: SetDefaultEnvironmentNode + ): Result = when (node) { is InlineSetDefaultEnvironmentNode -> { compileEnvironment.testEnvironment = node.environment @@ -106,7 +117,7 @@ class Compiler(val ion: IonSystem) { is FileSetDefaultEnvironmentNode -> { val dirPath = File(node.scriptLocation.inputName).parent val file = File("$dirPath/${node.environmentRelativeFilePath}") - + val lazyDatagram = lazy(LazyThreadSafetyMode.PUBLICATION) { ion.loader.load(file) } when { @@ -117,10 +128,13 @@ class Compiler(val ion: IonSystem) { Failure(FileSetDefaultEnvironmentNotSingleValue(file.absolutePath, node.scriptLocation)) } lazyDatagram.value[0].type != IonType.STRUCT -> { - Failure(FileSetDefaultEnvironmentNotStruct( + Failure( + FileSetDefaultEnvironmentNotStruct( file.absolutePath, lazyDatagram.value[0].type, - node.scriptLocation)) + node.scriptLocation + ) + ) } else -> { compileEnvironment.testEnvironment = lazyDatagram.value[0] as IonStruct @@ -130,18 +144,18 @@ class Compiler(val ion: IonSystem) { } } - /** * Generates and register a [TestExpression] into the compile environment. */ private fun compileTestNode(compileEnvironment: CompileEnvironment, node: TestNode): Result { val testExpression = TestExpression( - id = node.id, - description = node.description, - statement = node.statement, - environment = node.environment ?: compileEnvironment.testEnvironment, - expected = makeExpectedResult(node.expected), - scriptLocation = node.scriptLocation) + id = node.id, + description = node.description, + statement = node.statement, + environment = node.environment ?: compileEnvironment.testEnvironment, + expected = makeExpectedResult(node.expected), + scriptLocation = node.scriptLocation + ) val expressions = compileEnvironment.expressions return if (expressions.containsKey(node.id)) { @@ -179,10 +193,11 @@ class Compiler(val ion: IonSystem) { when (original) { is TestExpression -> { compileEnvironment.expressions[original.id] = AppendedTestExpression( - original.id, - original, - node.additionalData, - node.scriptLocation) + original.id, + original, + node.additionalData, + node.scriptLocation + ) SUCCESS } is SkippedTestExpression -> { @@ -206,25 +221,27 @@ class Compiler(val ion: IonSystem) { val matchers = node.patterns.map { it.toPatternRegex() } expressions.filter { (testId, _) -> matchers.any { it.matches(testId) } } - .forEach { (testId, testExpression) -> - when (testExpression) { - is TestExpression -> { - expressions[testId] = SkippedTestExpression( - testId, - testExpression, - node.scriptLocation) - } - is SkippedTestExpression -> { - // no-op - } - is AppendedTestExpression -> { - expressions[testId] = SkippedTestExpression( - testId, - testExpression.original, - node.scriptLocation) - } + .forEach { (testId, testExpression) -> + when (testExpression) { + is TestExpression -> { + expressions[testId] = SkippedTestExpression( + testId, + testExpression, + node.scriptLocation + ) + } + is SkippedTestExpression -> { + // no-op + } + is AppendedTestExpression -> { + expressions[testId] = SkippedTestExpression( + testId, + testExpression.original, + node.scriptLocation + ) } } + } return SUCCESS } diff --git a/testscript/src/org/partiql/testscript/compiler/TestScriptExpression.kt b/testscript/src/org/partiql/testscript/compiler/TestScriptExpression.kt index 49016ea179..e8d1149a09 100644 --- 
a/testscript/src/org/partiql/testscript/compiler/TestScriptExpression.kt +++ b/testscript/src/org/partiql/testscript/compiler/TestScriptExpression.kt @@ -4,41 +4,45 @@ import com.amazon.ion.IonStruct import com.amazon.ion.IonValue import org.partiql.testscript.extensions.toIonText import org.partiql.testscript.parser.ScriptLocation -import java.lang.StringBuilder /** - * Top level PTS compiled expression + * Top level PTS compiled expression */ sealed class TestScriptExpression { abstract val id: String abstract val scriptLocation: ScriptLocation } - /** - * Compiled expression for a skipped test. + * Compiled expression for a skipped test. */ -data class SkippedTestExpression(override val id: String, - val original: TestExpression, - override val scriptLocation: ScriptLocation) : TestScriptExpression() +data class SkippedTestExpression( + override val id: String, + val original: TestExpression, + override val scriptLocation: ScriptLocation +) : TestScriptExpression() /** * Compiled expression for a single test. */ -data class TestExpression(override val id: String, - val description: String?, - val statement: String, - val environment: IonStruct, - val expected: ExpectedResult, - override val scriptLocation: ScriptLocation) : TestScriptExpression() +data class TestExpression( + override val id: String, + val description: String?, + val statement: String, + val environment: IonStruct, + val expected: ExpectedResult, + override val scriptLocation: ScriptLocation +) : TestScriptExpression() /** * Compiled expression for an appended test. */ -data class AppendedTestExpression(override val id: String, - val original: TestExpression, - val additionalData: IonStruct, - override val scriptLocation: ScriptLocation) : TestScriptExpression() +data class AppendedTestExpression( + override val id: String, + val original: TestExpression, + val additionalData: IonStruct, + override val scriptLocation: ScriptLocation +) : TestScriptExpression() /** * A test expected result @@ -53,8 +57,8 @@ data class ExpectedSuccess(val expected: IonValue) : ExpectedResult() { } /** - * Singleton for expected errors + * Singleton for expected errors */ object ExpectedError : ExpectedResult() { override fun toString(): String = "ERROR" -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/evaluator/DefaultPtsEquality.kt b/testscript/src/org/partiql/testscript/evaluator/DefaultPtsEquality.kt index 431528e658..fbd5a94b0e 100644 --- a/testscript/src/org/partiql/testscript/evaluator/DefaultPtsEquality.kt +++ b/testscript/src/org/partiql/testscript/evaluator/DefaultPtsEquality.kt @@ -1,12 +1,19 @@ package org.partiql.testscript.evaluator -import com.amazon.ion.* -import com.amazon.ion.IonType.* +import com.amazon.ion.IonDecimal +import com.amazon.ion.IonList +import com.amazon.ion.IonSequence +import com.amazon.ion.IonSexp +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonTimestamp +import com.amazon.ion.IonType +import com.amazon.ion.IonValue import java.lang.IllegalArgumentException /** - * Default definition of Equality for PartiQL PTS. Although similar to PartiQL equivalency definition there are - * some differences as PartiQL `=` operator coerces types and for PTS two values are equivalent if and only if + * Default definition of Equality for PartiQL PTS. 
Although similar to PartiQL equivalency definition there are + * some differences as PartiQL `=` operator coerces types and for PTS two values are equivalent if and only if * they are of the same type. */ internal object DefaultPtsEquality : PtsEquality { @@ -16,20 +23,26 @@ internal object DefaultPtsEquality : PtsEquality { } // typed nulls - if(!left.isMissing() && !right.isMissing() && (left.isNullValue || right.isNullValue)) { + if (!left.isMissing() && !right.isMissing() && (left.isNullValue || right.isNullValue)) { return left.isNullValue && right.isNullValue } - + return when (left.type!!) { - NULL -> { + IonType.NULL -> { if (left.isMissing() || right.isMissing()) { left.isMissing() && right.isMissing() } else { right.isNullValue } } - BOOL, INT, FLOAT, SYMBOL, STRING, CLOB, BLOB -> left == right - DECIMAL -> { + IonType.BOOL, + IonType.INT, + IonType.FLOAT, + IonType.SYMBOL, + IonType.STRING, + IonType.CLOB, + IonType.BLOB -> left == right + IonType.DECIMAL -> { val leftDecimal = left as IonDecimal val rightDecimal = right as IonDecimal @@ -37,14 +50,14 @@ internal object DefaultPtsEquality : PtsEquality { // for PartiQL 1.0 == 1.00 while that's not true for Ion leftDecimal.bigDecimalValue().compareTo(rightDecimal.bigDecimalValue()) == 0 } - TIMESTAMP -> { + IonType.TIMESTAMP -> { val leftTimestamp = left as IonTimestamp val rightTimestamp = right as IonTimestamp leftTimestamp.timestampValue().compareTo(rightTimestamp.timestampValue()) == 0 } - LIST -> ptsSequenceEquals(left as IonList, right as IonList) - SEXP -> { + IonType.LIST -> ptsSequenceEquals(left as IonList, right as IonList) + IonType.SEXP -> { val leftSexp = left as IonSexp val rightSexp = right as IonSexp @@ -54,51 +67,49 @@ internal object DefaultPtsEquality : PtsEquality { ptsSequenceEquals(leftSexp, rightSexp) } } - STRUCT -> { + IonType.STRUCT -> { val leftStruct = left as IonStruct val rightStruct = right as IonStruct - leftStruct.size() == rightStruct.size() && leftStruct.all { leftElement -> + leftStruct.size() == rightStruct.size() && leftStruct.all { leftElement -> val rightElement = rightStruct[leftElement.fieldName] - - rightElement != null && areEqual(leftElement, rightElement) + + rightElement != null && areEqual(leftElement, rightElement) } } - DATAGRAM -> throw IllegalArgumentException("DATAGRAM are not a valid type in PTS") + IonType.DATAGRAM -> throw IllegalArgumentException("DATAGRAM are not a valid type in PTS") } } - + private fun IonSexp.isBag(): Boolean = - this.size > 1 - && this[0].type == SYMBOL - && (this[0] as IonSymbol).stringValue() == "bag" + this.size > 1 && + this[0].type == IonType.SYMBOL && + (this[0] as IonSymbol).stringValue() == "bag" private fun ptsSequenceEquals(left: IonSequence, right: IonSequence): Boolean = - left.size == right.size && - left.asSequence() - .mapIndexed { index, leftElement -> index to leftElement } - .all { (index, leftElement) -> areEqual(leftElement, right[index]) } + left.size == right.size && + left.asSequence() + .mapIndexed { index, leftElement -> index to leftElement } + .all { (index, leftElement) -> areEqual(leftElement, right[index]) } // bags can contain repeated elements so they are equal if and only if: // * Same size // * All elements in one are contained in the other at the same quantities private fun ptsBagEquals(left: IonSexp, right: IonSexp): Boolean = - when { - left.size != right.size -> false - left.isBag() && right.isBag() -> { - left.all { leftEl -> - val leftQtd = left.count { areEqual(leftEl, it) } - val rightQtd = right.count { 
areEqual(leftEl, it) } + when { + left.size != right.size -> false + left.isBag() && right.isBag() -> { + left.all { leftEl -> + val leftQtd = left.count { areEqual(leftEl, it) } + val rightQtd = right.count { areEqual(leftEl, it) } - leftQtd == rightQtd - } + leftQtd == rightQtd } - else -> false } + else -> false + } - - private fun IonValue.isMissing(): Boolean = this.isNullValue - && this.hasTypeAnnotation("missing") - && this.typeAnnotations.size == 1 - + private fun IonValue.isMissing(): Boolean = this.isNullValue && + this.hasTypeAnnotation("missing") && + this.typeAnnotations.size == 1 } diff --git a/testscript/src/org/partiql/testscript/extensions/Interpolation.kt b/testscript/src/org/partiql/testscript/extensions/Interpolation.kt index 23c4271c49..1169104e91 100644 --- a/testscript/src/org/partiql/testscript/extensions/Interpolation.kt +++ b/testscript/src/org/partiql/testscript/extensions/Interpolation.kt @@ -1,48 +1,53 @@ package org.partiql.testscript.extensions -import com.amazon.ion.* -import com.amazon.ion.IonType.* +import com.amazon.ion.IonList +import com.amazon.ion.IonSequence +import com.amazon.ion.IonSexp +import com.amazon.ion.IonString +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonText +import com.amazon.ion.IonValue internal class UndefinedVariableInterpolationException(val variableName: String) : RuntimeException() private val regex = "\\$([\\w{}]+)".toRegex() internal fun IonValue.interpolate(variables: IonStruct): IonValue = - when (this) { - is IonSymbol -> { - val symbolText = stringValue() - - val ionValue = if (symbolText.startsWith('$')) { - val variableName = symbolText.substring(1) - variables[variableName] ?: throw UndefinedVariableInterpolationException(variableName) - } else { - this - } - - ionValue.clone() + when (this) { + is IonSymbol -> { + val symbolText = stringValue() + + val ionValue = if (symbolText.startsWith('$')) { + val variableName = symbolText.substring(1) + variables[variableName] ?: throw UndefinedVariableInterpolationException(variableName) + } else { + this } - is IonString -> system.newString(stringValue().interpolate(variables)) + ionValue.clone() + } - is IonList -> this.foldInterpolating(system.newEmptyList(), variables) + is IonString -> system.newString(stringValue().interpolate(variables)) - is IonSexp -> { - this.foldInterpolating(system.newEmptySexp(), variables) - } + is IonList -> this.foldInterpolating(system.newEmptyList(), variables) - is IonStruct -> this.fold(system.newEmptyStruct()) { struct, el -> - struct.apply { add(el.fieldName, el.interpolate(variables)) } - } - - else -> this.clone() + is IonSexp -> { + this.foldInterpolating(system.newEmptySexp(), variables) + } + is IonStruct -> this.fold(system.newEmptyStruct()) { struct, el -> + struct.apply { add(el.fieldName, el.interpolate(variables)) } } + else -> this.clone() + } + private fun IonSequence.foldInterpolating(target: IonSequence, variables: IonStruct) = - this.fold(target) { acc, el -> - acc.add(el.interpolate(variables)) - acc - } + this.fold(target) { acc, el -> + acc.add(el.interpolate(variables)) + acc + } private fun String.interpolate(variables: IonStruct): String { val matches = regex.findAll(this).map { it.groups[1]!!.value } @@ -55,13 +60,13 @@ private fun String.interpolate(variables: IonStruct): String { match } - val replacement = variables[variableName]?.stringfy() - ?: throw UndefinedVariableInterpolationException(variableName) + val replacement = variables[variableName]?.stringfy() + ?: throw 
UndefinedVariableInterpolationException(variableName) interpolated.replace("\$$match", replacement) } } -private fun IonValue.stringfy(): String = when(this) { +private fun IonValue.stringfy(): String = when (this) { is IonText -> this.stringValue() // to remove the extra " else -> this.toIonText() } diff --git a/testscript/src/org/partiql/testscript/extensions/ListExtensions.kt b/testscript/src/org/partiql/testscript/extensions/ListExtensions.kt index 82c97da134..9c75a09c90 100644 --- a/testscript/src/org/partiql/testscript/extensions/ListExtensions.kt +++ b/testscript/src/org/partiql/testscript/extensions/ListExtensions.kt @@ -3,10 +3,10 @@ package org.partiql.testscript.extensions internal fun <T1, T2> List<T1>.crossProduct(other: List<T2>): List<Pair<T1, T2>> { val r = mutableListOf<Pair<T1, T2>>() for (thisEl in this) { - for(otherEl in other) { + for (otherEl in other) { r.add(thisEl to otherEl) } } - + return r -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/junitRunner/PtsRunner.kt b/testscript/src/org/partiql/testscript/junitRunner/PtsRunner.kt index 0dc544d048..7ab5029b12 100644 --- a/testscript/src/org/partiql/testscript/junitRunner/PtsRunner.kt +++ b/testscript/src/org/partiql/testscript/junitRunner/PtsRunner.kt @@ -3,20 +3,23 @@ package org.partiql.testscript.junitRunner import com.amazon.ion.system.IonSystemBuilder import org.junit.runner.Description import org.junit.runner.Runner +import org.junit.runner.notification.Failure import org.junit.runner.notification.RunNotifier -import org.partiql.testscript.parser.Parser +import org.partiql.testscript.compiler.AppendedTestExpression +import org.partiql.testscript.compiler.Compiler +import org.partiql.testscript.compiler.ExpectedResult +import org.partiql.testscript.compiler.SkippedTestExpression +import org.partiql.testscript.compiler.TestExpression +import org.partiql.testscript.compiler.TestScriptExpression import org.partiql.testscript.evaluator.TestFailure import org.partiql.testscript.evaluator.TestResultSuccess +import org.partiql.testscript.extensions.listRecursive +import org.partiql.testscript.extensions.ptsFileFilter import org.partiql.testscript.parser.NamedInputStream +import org.partiql.testscript.parser.Parser import java.io.File import java.io.FileInputStream -import org.junit.runner.notification.Failure -import org.partiql.testscript.compiler.* -import org.partiql.testscript.extensions.listRecursive -import org.partiql.testscript.extensions.ptsFileFilter -import java.io.FileFilter import java.lang.AssertionError -import java.lang.IllegalArgumentException /** * A Junit4 runner that integrates PTS tests into JUnit.
@@ -25,9 +28,9 @@ class PtsRunner(private val testClass: Class<*>) : Runner() { private val ion = IonSystemBuilder.standard().build() private val testInstance = testClass - .asSubclass(Junit4PtsTest::class.java) - .getDeclaredConstructor() - .newInstance() + .asSubclass(Junit4PtsTest::class.java) + .getDeclaredConstructor() + .newInstance() private val parser = Parser(ion) private val compiler = Compiler(ion) @@ -35,12 +38,12 @@ class PtsRunner(private val testClass: Class<*>) : Runner() { private val evaluator = testInstance.evaluator private val ionDocs = testInstance.ptsFilePaths - .asSequence() - .map { File(it) } - .map { it.listRecursive(ptsFileFilter) } - .flatMap { it.asSequence() } - .map { NamedInputStream(it.absolutePath, FileInputStream(it)) } - .toList() + .asSequence() + .map { File(it) } + .map { it.listRecursive(ptsFileFilter) } + .flatMap { it.asSequence() } + .map { NamedInputStream(it.absolutePath, FileInputStream(it)) } + .toList() private val ast = parser.parse(ionDocs) private val testExpressions = compiler.compile(ast) @@ -51,7 +54,8 @@ class PtsRunner(private val testClass: Class<*>) : Runner() { testResults.forEach { val testDescription = Description.createTestDescription( testClass, - "${it.test.scriptLocation} ${it.test.id}") + "${it.test.scriptLocation} ${it.test.id}" + ) notifier.fireTestStarted(testDescription) try { @@ -69,11 +73,9 @@ class PtsRunner(private val testClass: Class<*>) : Runner() { notifier.fireTestFailure(Failure(testDescription, AssertionError(errorMessage))) } } - } - catch (e: Exception) { + } catch (e: Exception) { notifier.fireTestFailure(Failure(description, e)) - } - finally { + } finally { notifier.fireTestFinished(testDescription) } } diff --git a/testscript/src/org/partiql/testscript/parser/Parser.kt b/testscript/src/org/partiql/testscript/parser/Parser.kt index 65a6b9c43c..87efdf659b 100644 --- a/testscript/src/org/partiql/testscript/parser/Parser.kt +++ b/testscript/src/org/partiql/testscript/parser/Parser.kt @@ -1,22 +1,33 @@ package org.partiql.testscript.parser -import com.amazon.ion.* +import com.amazon.ion.IonException +import com.amazon.ion.IonString +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSystem +import com.amazon.ion.IonType import org.partiql.testscript.Failure import org.partiql.testscript.Result import org.partiql.testscript.Success import org.partiql.testscript.TestScriptError -import org.partiql.testscript.parser.ast.* +import org.partiql.testscript.parser.ast.AppendTestNode +import org.partiql.testscript.parser.ast.AstNode +import org.partiql.testscript.parser.ast.FileSetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.InlineSetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.ModuleNode +import org.partiql.testscript.parser.ast.SetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.SkipListNode +import org.partiql.testscript.parser.ast.TestNode import org.partiql.testscript.parser.ast.builders.AppendTestBuilder import org.partiql.testscript.parser.ast.builders.ForBuilder import org.partiql.testscript.parser.ast.builders.StructBuilder import org.partiql.testscript.parser.ast.builders.TestBuilder -//TODO replace the validation side of this class by ion-schema if/when https://github.com/amzn/ion-schema-kotlin/issues/120 +// TODO replace the validation side of this class by ion-schema if/when https://github.com/amzn/ion-schema-kotlin/issues/120 // is completed. 
Ion schema will provide much richer error messages but without the line numbers it's hard to figure // out where a correction must be made /** - * PTS parser. + * PTS parser. */ class Parser(private val ion: IonSystem) { @@ -31,7 +42,7 @@ class Parser(private val ion: IonSystem) { when (result) { is Failure -> errors.addAll(result.errors) is Success -> modules.add(result.value) - } + } } catch (e: IonException) { throw ParserIonException(input.name, e) } @@ -87,28 +98,34 @@ class Parser(private val ion: IonSystem) { } private fun parseSetDefaultEnvironment(reader: IonInputReader): Result { - return when(reader.type) { + return when (reader.type) { IonType.STRUCT -> { val value = reader.ionValueWithLocation() // remove the set_default_environment annotation value.ionValue.clearTypeAnnotations() - Success(InlineSetDefaultEnvironmentNode(value.ionValue as IonStruct, value.scriptLocation)) + Success(InlineSetDefaultEnvironmentNode(value.ionValue as IonStruct, value.scriptLocation)) } IonType.STRING -> { val value = reader.ionValueWithLocation() - Success(FileSetDefaultEnvironmentNode( - (value.ionValue as IonString).stringValue(), - value.scriptLocation)) + Success( + FileSetDefaultEnvironmentNode( + (value.ionValue as IonString).stringValue(), + value.scriptLocation + ) + ) } else -> { - Failure(UnexpectedIonTypeError( + Failure( + UnexpectedIonTypeError( "set_default_environment", listOf(IonType.STRUCT, IonType.STRING), reader.type, - reader.currentScriptLocation())) + reader.currentScriptLocation() + ) + ) } } } @@ -127,11 +144,14 @@ class Parser(private val ion: IonSystem) { if (reader.type == IonType.STRING) { patterns.add(reader.stringValue()) } else { - errors.add(UnexpectedIonTypeError( + errors.add( + UnexpectedIonTypeError( "skip_list[$index]", IonType.STRING, reader.type, - reader.currentScriptLocation())) + reader.currentScriptLocation() + ) + ) } } } @@ -158,27 +178,30 @@ class Parser(private val ion: IonSystem) { } private fun parseTest(reader: IonInputReader): Result = - parseStructFunction(reader, TestBuilder("test", reader.currentScriptLocation())) + parseStructFunction(reader, TestBuilder("test", reader.currentScriptLocation())) private fun parseAppendTest(reader: IonInputReader): Result = - parseStructFunction(reader, AppendTestBuilder(reader.currentScriptLocation())) + parseStructFunction(reader, AppendTestBuilder(reader.currentScriptLocation())) private fun parseFor(reader: IonInputReader): Result> { val location = reader.currentScriptLocation() if (reader.type != IonType.STRUCT) { - return Failure(UnexpectedIonTypeError( + return Failure( + UnexpectedIonTypeError( "for", IonType.STRUCT, reader.type, - location)) + location + ) + ) } val builder = ForBuilder(ion, location) - reader.stepIn { seq -> + reader.stepIn { seq -> seq.forEach { _ -> builder.setValue(reader.fieldName, reader) } } return builder.build() } -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/parser/ast/builders/BaseBuilder.kt b/testscript/src/org/partiql/testscript/parser/ast/builders/BaseBuilder.kt index 0b8202db2b..7f4d5fa731 100644 --- a/testscript/src/org/partiql/testscript/parser/ast/builders/BaseBuilder.kt +++ b/testscript/src/org/partiql/testscript/parser/ast/builders/BaseBuilder.kt @@ -2,7 +2,13 @@ package org.partiql.testscript.parser.ast.builders import com.amazon.ion.IonType import org.partiql.testscript.Result -import org.partiql.testscript.parser.* +import org.partiql.testscript.parser.DuplicatedFieldError +import 
org.partiql.testscript.parser.IonValueWithLocation +import org.partiql.testscript.parser.MissingRequiredError +import org.partiql.testscript.parser.ParserError +import org.partiql.testscript.parser.ScriptLocation +import org.partiql.testscript.parser.UnexpectedFieldError +import org.partiql.testscript.parser.UnexpectedIonTypeError internal abstract class BaseBuilder(val location: ScriptLocation) { protected val errors = mutableListOf() @@ -12,9 +18,7 @@ internal abstract class BaseBuilder(val location: ScriptLocation) { errors.add(MissingRequiredError(label, location)) } } - - - + abstract fun build(): Result } diff --git a/testscript/src/org/partiql/testscript/parser/ast/builders/ForBuilder.kt b/testscript/src/org/partiql/testscript/parser/ast/builders/ForBuilder.kt index 7f6ece99fb..74d578b332 100644 --- a/testscript/src/org/partiql/testscript/parser/ast/builders/ForBuilder.kt +++ b/testscript/src/org/partiql/testscript/parser/ast/builders/ForBuilder.kt @@ -1,15 +1,27 @@ package org.partiql.testscript.parser.ast.builders -import com.amazon.ion.* +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonSystem +import com.amazon.ion.IonType +import com.amazon.ion.IonValue import org.partiql.testscript.Failure import org.partiql.testscript.Result import org.partiql.testscript.Success import org.partiql.testscript.TestScriptError import org.partiql.testscript.extensions.UndefinedVariableInterpolationException -import org.partiql.testscript.extensions.interpolate import org.partiql.testscript.extensions.crossProduct +import org.partiql.testscript.extensions.interpolate import org.partiql.testscript.extensions.toIonText -import org.partiql.testscript.parser.* +import org.partiql.testscript.parser.EmptyError +import org.partiql.testscript.parser.InvalidTemplateValueError +import org.partiql.testscript.parser.IonInputReader +import org.partiql.testscript.parser.IonValueWithLocation +import org.partiql.testscript.parser.MissingRequiredError +import org.partiql.testscript.parser.MissingTemplateVariableError +import org.partiql.testscript.parser.ScriptLocation +import org.partiql.testscript.parser.UnexpectedFieldError +import org.partiql.testscript.parser.UnexpectedIonTypeError import org.partiql.testscript.parser.ast.TestNode import org.partiql.testscript.parser.ast.TestTemplate import org.partiql.testscript.parser.ast.VariableSet @@ -19,7 +31,7 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri private var variableSets: MutableList>? 
= null private val errors = mutableListOf() - + fun setValue(name: String, reader: IonInputReader) { when (name) { "template" -> { @@ -29,7 +41,7 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri } "variable_sets" -> { variableSets = mutableListOf() - + setVariableSet(reader) } else -> errors.add(UnexpectedFieldError("for.$name", reader.currentScriptLocation())) @@ -39,10 +51,13 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri private fun setTemplate(reader: IonInputReader) { if (reader.type != IonType.LIST) { val error = Failure( - UnexpectedIonTypeError("for.template", - IonType.LIST, - reader.type, - reader.currentScriptLocation())) + UnexpectedIonTypeError( + "for.template", + IonType.LIST, + reader.type, + reader.currentScriptLocation() + ) + ) templates!!.add(error) } else { reader.stepIn() @@ -72,11 +87,13 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri private fun setVariableSet(reader: IonInputReader) { if (reader.type != IonType.LIST) { val error = Failure( - UnexpectedIonTypeError( - "for.variable_sets", - IonType.LIST, - reader.type, - reader.currentScriptLocation())) + UnexpectedIonTypeError( + "for.variable_sets", + IonType.LIST, + reader.type, + reader.currentScriptLocation() + ) + ) variableSets!!.add(error) } else { @@ -87,11 +104,14 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri val value = reader.ionValueWithLocation() Success(VariableSet(value.ionValue as IonStruct, value.scriptLocation)) } else { - Failure(UnexpectedIonTypeError( + Failure( + UnexpectedIonTypeError( "variable_sets[$index]", IonType.STRUCT, reader.type, - reader.currentScriptLocation())) + reader.currentScriptLocation() + ) + ) } variableSets!!.add(result) @@ -118,10 +138,10 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri val testBuilder = TestBuilder("for.template", location) - val id = IonValueWithLocation( - ion.newSymbol("${testTemplate.id}\$\$${variableSet.variables.toIonText()}"), - location) + ion.newSymbol("${testTemplate.id}\$\$${variableSet.variables.toIonText()}"), + location + ) testBuilder.setValue("id", id) @@ -189,8 +209,8 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri val (validVariableSets, variableSetsWithErrors) = variableSets!!.partition { it is Success } templatesWithErrors.map { it as Failure } - .union(variableSetsWithErrors.map { it as Failure }) - .forEach { errors.addAll(it.errors) } + .union(variableSetsWithErrors.map { it as Failure }) + .forEach { errors.addAll(it.errors) } val testNodes = mutableListOf() validTemplates.crossProduct(validVariableSets).forEach { @@ -213,15 +233,14 @@ internal class ForBuilder(private val ion: IonSystem, private val location: Scri private fun interpolate(target: IonValue, variableSet: VariableSet): Result = try { Success(target.interpolate(variableSet.variables)) - } - catch (e: UndefinedVariableInterpolationException) { + } catch (e: UndefinedVariableInterpolationException) { Failure(MissingTemplateVariableError(e.variableName, variableSet.scriptLocation)) } } -private class TestTemplateBuilder(path: String, location: ScriptLocation) : - StructBuilder(path, location) { - +private class TestTemplateBuilder(path: String, location: ScriptLocation) : + StructBuilder(path, location) { + private fun validateTemplate(label: String, field: IonValueWithLocation?, expected: IonType) { if (field == null) { return @@ -237,14 +256,14 @@ private 
class TestTemplateBuilder(path: String, location: ScriptLocation) : } override fun build(): Result { - val id = fieldMap.remove("id") - val statement = fieldMap.remove("statement") - val expected = fieldMap.remove("expected") - val description = fieldMap.remove("description") + val id = fieldMap.remove("id") + val statement = fieldMap.remove("statement") + val expected = fieldMap.remove("expected") + val description = fieldMap.remove("description") val environment = fieldMap.remove("environment") validateUnexpectedFields() - + validateRequired("$path.id", id) validateType("$path.id", id, IonType.SYMBOL) @@ -259,13 +278,16 @@ private class TestTemplateBuilder(path: String, location: ScriptLocation) : validateTemplate("$path.expected", expected, IonType.SEXP) return if (errors.isEmpty()) { - Success(TestTemplate( + Success( + TestTemplate( id = (id!!.ionValue as IonSymbol).stringValue(), description = description?.ionValue, statement = statement!!.ionValue, environment = environment?.ionValue, expected = expected!!.ionValue, - scriptLocation = location)) + scriptLocation = location + ) + ) } else { Failure(errors) } diff --git a/testscript/src/org/partiql/testscript/parser/ast/builders/TestBuilder.kt b/testscript/src/org/partiql/testscript/parser/ast/builders/TestBuilder.kt index e2bf3a1738..1ed8033740 100644 --- a/testscript/src/org/partiql/testscript/parser/ast/builders/TestBuilder.kt +++ b/testscript/src/org/partiql/testscript/parser/ast/builders/TestBuilder.kt @@ -1,10 +1,20 @@ package org.partiql.testscript.parser.ast.builders -import com.amazon.ion.* +import com.amazon.ion.IonSexp +import com.amazon.ion.IonString +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonType import org.partiql.testscript.Failure import org.partiql.testscript.Result import org.partiql.testscript.Success -import org.partiql.testscript.parser.* +import org.partiql.testscript.parser.EmptyError +import org.partiql.testscript.parser.InvalidExpectedErrorSizeError +import org.partiql.testscript.parser.InvalidExpectedSuccessSizeError +import org.partiql.testscript.parser.InvalidExpectedTagError +import org.partiql.testscript.parser.IonValueWithLocation +import org.partiql.testscript.parser.ScriptLocation +import org.partiql.testscript.parser.UnexpectedIonTypeError import org.partiql.testscript.parser.ast.TestNode internal class TestBuilder(path: String, location: ScriptLocation) : StructBuilder(path, location) { @@ -59,15 +69,18 @@ internal class TestBuilder(path: String, location: ScriptLocation) : StructBuild validateExpected(expected) return if (errors.isEmpty()) { - Success(TestNode( + Success( + TestNode( id = (id!!.ionValue as IonSymbol).stringValue(), description = (description?.ionValue as IonString?)?.stringValue(), statement = (statement!!.ionValue as IonString).stringValue(), environment = environment?.ionValue as IonStruct?, expected = expected!!.ionValue as IonSexp, - scriptLocation = location)) + scriptLocation = location + ) + ) } else { Failure(errors) } } -} \ No newline at end of file +} diff --git a/testscript/test/org/partiql/testscript/parser/ParserTests.kt b/testscript/test/org/partiql/testscript/parser/ParserTests.kt index c792f2cbc6..eb4ffd0ddc 100644 --- a/testscript/test/org/partiql/testscript/parser/ParserTests.kt +++ b/testscript/test/org/partiql/testscript/parser/ParserTests.kt @@ -3,7 +3,12 @@ package org.partiql.testscript.parser import com.amazon.ion.IonSexp import com.amazon.ion.IonStruct import org.junit.jupiter.api.Test -import 
org.partiql.testscript.parser.ast.* +import org.partiql.testscript.parser.ast.AppendTestNode +import org.partiql.testscript.parser.ast.FileSetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.InlineSetDefaultEnvironmentNode +import org.partiql.testscript.parser.ast.ModuleNode +import org.partiql.testscript.parser.ast.SkipListNode +import org.partiql.testscript.parser.ast.TestNode class ParserTests : BaseParseTests() { @@ -11,9 +16,10 @@ class ParserTests : BaseParseTests() { * We use `#` instead of `$` in test fixtures because escaping `$` in a kotlin * multiline string is messy, e.g. `"""${"$"}"""` results in `"$"` */ - + @Test - fun singleFullTest() = assertParse(""" + fun singleFullTest() = assertParse( + """ |test::{ | id: test_1, | description: "a test", @@ -21,60 +27,99 @@ class ParserTests : BaseParseTests() { | environment: {myTable: [{a: 1}]}, | expected: (success (bag {a: 1})) |}""".trimMargin(), - expected = singleModulesList(TestNode(id = "test_1", - description = "a test", - statement = "SELECT * FROM myTable", - environment = ion.singleValue("{myTable: [{a: 1}]}") as IonStruct, - expected = ion.singleValue("(success (bag {a: 1}))") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", - 1)))) - - @Test - fun singleTestWithRequiredOnly() = assertParse(""" + expected = singleModulesList( + TestNode( + id = "test_1", + description = "a test", + statement = "SELECT * FROM myTable", + environment = ion.singleValue("{myTable: [{a: 1}]}") as IonStruct, + expected = ion.singleValue("(success (bag {a: 1}))") as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 1 + ) + ) + ) + ) + + @Test + fun singleTestWithRequiredOnly() = assertParse( + """ |test::{ | id: test_1, | statement: "SELECT * FROM myTable", | expected: (success (bag {a: 1})) |}""".trimMargin(), - expected = singleModulesList(TestNode(id = "test_1", - description = null, - statement = "SELECT * FROM myTable", - environment = null, - expected = ion.singleValue("(success (bag {a: 1}))") as IonSexp, - scriptLocation = ScriptLocation( - "$inputBasePath/input[0].sqlts", - 1)))) - - @Test - fun singleSetDefaultEnvironmentNode() = assertParse("set_default_environment::{ foo: [1,2,3,4,5] }", - expected = singleModulesList( - InlineSetDefaultEnvironmentNode(ion.singleValue("{ foo: [1,2,3,4,5] }") as IonStruct, - ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) - - @Test - fun singleSetDefaultEnvironmentFromFileNode() = assertParse("set_default_environment::\"some_file.ion\"", - expected = singleModulesList( - FileSetDefaultEnvironmentNode("some_file.ion", - ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) - - @Test - fun singleSkipList() = assertParse("""skip_list::[ "test_1", "test_2" ]""", - expected = singleModulesList(SkipListNode(listOf("test_1", "test_2"), - ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) - - @Test - fun singleAppendTest() = assertParse(""" + expected = singleModulesList( + TestNode( + id = "test_1", + description = null, + statement = "SELECT * FROM myTable", + environment = null, + expected = ion.singleValue("(success (bag {a: 1}))") as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 1 + ) + ) + ) + ) + + @Test + fun singleSetDefaultEnvironmentNode() = assertParse( + "set_default_environment::{ foo: [1,2,3,4,5] }", + expected = singleModulesList( + InlineSetDefaultEnvironmentNode( + ion.singleValue("{ foo: [1,2,3,4,5] }") as IonStruct, + ScriptLocation("$inputBasePath/input[0].sqlts", 1) + 
) + ) + ) + + @Test + fun singleSetDefaultEnvironmentFromFileNode() = assertParse( + "set_default_environment::\"some_file.ion\"", + expected = singleModulesList( + FileSetDefaultEnvironmentNode( + "some_file.ion", + ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) + + @Test + fun singleSkipList() = assertParse( + """skip_list::[ "test_1", "test_2" ]""", + expected = singleModulesList( + SkipListNode( + listOf("test_1", "test_2"), + ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) + + @Test + fun singleAppendTest() = assertParse( + """ |append_test::{ | pattern: "test.*", | additional_data: { foo: 1, bar: {} } |}""".trimMargin(), - expected = singleModulesList(AppendTestNode(pattern = "test.*", - additionalData = ion.singleValue("{ foo: 1, bar: {} }") as IonStruct, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", - 1)))) - - @Test - fun allFunctions() = assertParse(""" + expected = singleModulesList( + AppendTestNode( + pattern = "test.*", + additionalData = ion.singleValue("{ foo: 1, bar: {} }") as IonStruct, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 1 + ) + ) + ) + ) + + @Test + fun allFunctions() = assertParse( + """ |set_default_environment::{ foo: [1,2,3,4,5] } | |test::{ @@ -104,50 +149,88 @@ class ParserTests : BaseParseTests() { | ] |} """.trimMargin(), - expected = singleModulesList(InlineSetDefaultEnvironmentNode(ion.singleValue("{ foo: [1,2,3,4,5] }") as IonStruct, - ScriptLocation("$inputBasePath/input[0].sqlts", - 1)), - - TestNode(id = "test_1", - description = null, - statement = "SELECT * FROM myTable", - environment = null, - expected = ion.singleValue("(success (bag {a: 1}))") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", - 3)), - - SkipListNode(listOf("test_1"), - ScriptLocation("$inputBasePath/input[0].sqlts", 9)), - - AppendTestNode(pattern = "test.*", - additionalData = ion.singleValue("{ foo: 1, bar: {} }") as IonStruct, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", - 11)), - - TestNode(id = "testTemplate\$\${value:1,expected:(success 2)}", - description = null, - statement = "1 + 1", - environment = null, - expected = ion.singleValue("(success 2)") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", - 26)))) - - @Test - fun multipleDocuments() = assertParse("set_default_environment::{ foo: [1] }", - "set_default_environment::{ foo: [2] }", - expected = listOf(ModuleNode(listOf(InlineSetDefaultEnvironmentNode( - ion.singleValue("{ foo: [1] }") as IonStruct, - ScriptLocation("$inputBasePath/input[0].sqlts", 1))), - ScriptLocation("$inputBasePath/input[0].sqlts", 0)), - - ModuleNode(listOf(InlineSetDefaultEnvironmentNode(ion.singleValue( - "{ foo: [2] }") as IonStruct, - ScriptLocation("$inputBasePath/input[1].sqlts", 1))), - ScriptLocation("$inputBasePath/input[1].sqlts", 0)))) + expected = singleModulesList( + InlineSetDefaultEnvironmentNode( + ion.singleValue("{ foo: [1,2,3,4,5] }") as IonStruct, + ScriptLocation( + "$inputBasePath/input[0].sqlts", + 1 + ) + ), + + TestNode( + id = "test_1", + description = null, + statement = "SELECT * FROM myTable", + environment = null, + expected = ion.singleValue("(success (bag {a: 1}))") as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 3 + ) + ), + + SkipListNode( + listOf("test_1"), + ScriptLocation("$inputBasePath/input[0].sqlts", 9) + ), + + AppendTestNode( + pattern = "test.*", + additionalData = ion.singleValue("{ foo: 1, bar: {} }") 
as IonStruct, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 11 + ) + ), + + TestNode( + id = "testTemplate\$\${value:1,expected:(success 2)}", + description = null, + statement = "1 + 1", + environment = null, + expected = ion.singleValue("(success 2)") as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 26 + ) + ) + ) + ) + + @Test + fun multipleDocuments() = assertParse( + "set_default_environment::{ foo: [1] }", + "set_default_environment::{ foo: [2] }", + expected = listOf( + ModuleNode( + listOf( + InlineSetDefaultEnvironmentNode( + ion.singleValue("{ foo: [1] }") as IonStruct, + ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ), + ScriptLocation("$inputBasePath/input[0].sqlts", 0) + ), + + ModuleNode( + listOf( + InlineSetDefaultEnvironmentNode( + ion.singleValue( + "{ foo: [2] }" + ) as IonStruct, + ScriptLocation("$inputBasePath/input[1].sqlts", 1) + ) + ), + ScriptLocation("$inputBasePath/input[1].sqlts", 0) + ) + ) + ) @Test fun testMissingId() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | description: "a test", | statement: "SELECT * FROM myTable", @@ -155,14 +238,16 @@ class ParserTests : BaseParseTests() { | expected: (success (bag {a: 1})) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: test.id - """.trimMargin()) + """.trimMargin() + ) @Test fun testMissingStatement() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -170,14 +255,16 @@ class ParserTests : BaseParseTests() { | expected: (success (bag {a: 1})) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: test.statement - """.trimMargin()) + """.trimMargin() + ) @Test fun testMissingExpected() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -185,37 +272,43 @@ class ParserTests : BaseParseTests() { | environment: {myTable: [{a: 1}]}, |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: test.expected - """.trimMargin()) + """.trimMargin() + ) @Test fun testMissingAllRequired() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | description: "a test", | environment: {myTable: [{a: 1}]}, |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: test.expected | $inputBasePath/input[0].sqlts:1 - Missing required field: test.id | $inputBasePath/input[0].sqlts:1 - Missing required field: test.statement - """.trimMargin()) + """.trimMargin() + ) @Test fun testWrongType() = - assertParseError(input = """test::"should be a struct" """, - expectedErrorMessage = """ + assertParseError( + input = """test::"should be a struct" """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for test. 
Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun testWrongIdType() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: "should be symbol", | description: "a test", @@ -224,14 +317,16 @@ class ParserTests : BaseParseTests() { | expected: (success (bag {a: 1})) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:2 - Wrong type for test.id. Expected [SYMBOL], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun testWrongDescriptionType() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: should_be_text, @@ -240,14 +335,16 @@ class ParserTests : BaseParseTests() { | expected: (success (bag {a: 1})) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Wrong type for test.description. Expected [STRING], got SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun testWrongStatementType() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -256,14 +353,16 @@ class ParserTests : BaseParseTests() { | expected: (success (bag {a: 1})) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:4 - Wrong type for test.statement. Expected [STRING], got SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun testWrongEnvironmentType() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -272,14 +371,16 @@ class ParserTests : BaseParseTests() { | expected: (success (bag {a: 1})) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:5 - Wrong type for test.environment. Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun testExpectedWrongType() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -288,14 +389,16 @@ class ParserTests : BaseParseTests() { | expected: "should be a s-exp" |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - Wrong type for test.expected. 
Expected [SEXP], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun testUnknownField() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -305,14 +408,16 @@ class ParserTests : BaseParseTests() { | shouldNotBeHere: 1 |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:7 - Unexpected field: test.shouldNotBeHere - """.trimMargin()) + """.trimMargin() + ) @Test fun testWrongExpectedValue() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -321,14 +426,16 @@ class ParserTests : BaseParseTests() { | expected: (invalid) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - Invalid test.expected tag, must be either 'success' or 'error' got 'invalid' - """.trimMargin()) + """.trimMargin() + ) @Test fun testExpectedErrorWithElements() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -337,14 +444,16 @@ class ParserTests : BaseParseTests() { | expected: (error a) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - test.expected error can only have a single element, e.g. (error) - """.trimMargin()) + """.trimMargin() + ) @Test fun testExpectedSuccessEmpty() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -353,14 +462,16 @@ class ParserTests : BaseParseTests() { | expected: (success) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - test.expected success must have two elements, e.g. (success (bag {a: 1})) - """.trimMargin()) + """.trimMargin() + ) @Test fun testExpectedSuccessWithTooManyElements() = - assertParseError(input = """ + assertParseError( + input = """ |test::{ | id: test_1, | description: "a test", @@ -369,100 +480,118 @@ class ParserTests : BaseParseTests() { | expected: (success a b) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - test.expected success must have two elements, e.g. (success (bag {a: 1})) - """.trimMargin()) + """.trimMargin() + ) @Test fun setDefaultEnvironmentWrongType() = - assertParseError(input = """ set_default_environment::'should be a struct or string' """, - expectedErrorMessage = """ + assertParseError( + input = """ set_default_environment::'should be a struct or string' """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for set_default_environment. Expected [STRUCT, STRING], got SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun skipListWrongType() = - assertParseError(input = """ skip_list::"should be a list" """, - expectedErrorMessage = """ + assertParseError( + input = """ skip_list::"should be a list" """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for skip_list. 
Expected [LIST], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun skipListWrongElementType() = - assertParseError(input = """ skip_list::["test_1", 'should be a string'] """, - expectedErrorMessage = """ + assertParseError( + input = """ skip_list::["test_1", 'should be a string'] """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for skip_list[1]. Expected [STRING], got SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun appendTestWrongType() = - assertParseError(input = """ append_test::"should be a struct" """, - expectedErrorMessage = """ + assertParseError( + input = """ append_test::"should be a struct" """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for append_test. Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun appendTestMissingPattern() = - assertParseError(input = """ + assertParseError( + input = """ |append_test::{ | additional_data: {} |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: append_test.pattern - """.trimMargin()) + """.trimMargin() + ) @Test fun appendTestMissingAdditionalData() = - assertParseError(input = """ + assertParseError( + input = """ |append_test::{ | pattern: "test", |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: append_test.additional_data - """.trimMargin()) + """.trimMargin() + ) @Test fun appendTestWrongPatternType() = - assertParseError(input = """ + assertParseError( + input = """ |append_test::{ | pattern: 'should be a string', | additional_data: {} |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:2 - Wrong type for append_test.pattern. Expected [STRING], got SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun appendTestWrongAdditionalDataType() = - assertParseError(input = """ + assertParseError( + input = """ |append_test::{ | pattern: "test", | additional_data: "should be a struct" |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Wrong type for append_test.additional_data. 
Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun appendTestUnknownField() = - assertParseError(input = """ + assertParseError( + input = """ |append_test::{ | pattern: "test", | additional_data: {}, | shouldNotBeHere: 1 |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:4 - Unexpected field: append_test.shouldNotBeHere - """.trimMargin()) - -} \ No newline at end of file + """.trimMargin() + ) +} From d88b7a45643aa305e98ae448ab6aba89e48344af Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 16:48:27 -0800 Subject: [PATCH 6/8] Exclude partiql-domain.kt from ktlint checks --- lang/build.gradle | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lang/build.gradle b/lang/build.gradle index d5f7e67dff..67fb3f387a 100644 --- a/lang/build.gradle +++ b/lang/build.gradle @@ -66,6 +66,8 @@ sourceSets { } } +def partiqlDomainFile = 'partiql-domains.kt' + task generatePigDomains { group = "code generation" @@ -73,7 +75,7 @@ task generatePigDomains { // https://github.com/partiql/partiql-ir-generator/issues/5 def typeUniverse = new File(projectDir, "resources/org/partiql/type-domains/partiql.ion") - def outputFile = new File(projectDir, "src/org/partiql/lang/domains/partiql-domains.kt") + def outputFile = new File(projectDir, "src/org/partiql/lang/domains/$partiqlDomainFile") def targetLanguage = new TargetLanguage.Kotlin("org.partiql.lang.domains") def cmd = new Command.Generate(typeUniverse, outputFile, targetLanguage) @@ -93,6 +95,11 @@ task generatePigDomains { compileKotlin.dependsOn generatePigDomains +ktlint { + filter { + exclude("**/$partiqlDomainFile") + } +} dokka { outputFormat = "html" From 5df233ce366c293ab89bd581dab8335852e51d52 Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 17:45:42 -0800 Subject: [PATCH 7/8] Resolve lang/ manual style checks --- .../lang/{Exceptions.kt => SqlException.kt} | 24 +- ...V0AstSserializer.kt => V0AstSerializer.kt} | 0 .../lang/eval/NaturalExprValueComparators.kt | 32 ++- .../StaticTypeInferenceVisitorTransform.kt | 245 +++++++++------- .../partiql/lang/mappers/IonSchemaMapper.kt | 129 ++++++--- ...Exceptions.kt => TypeNotFoundException.kt} | 2 +- .../org/partiql/lang/eval/CastTestBase.kt | 243 ++++++++-------- ...ngCompilerCustomAnyOfTypeOperationTests.kt | 130 +++++---- .../lang/eval/builtins/TimestampParserTest.kt | 264 ++++++++++-------- .../TimestampTemporalAccessorTests.kt | 52 ++-- .../TimestampFormatPatternParserTest.kt | 52 ++-- ...electJoinTests.kt => SqlParserJoinTest.kt} | 94 ++++--- ...tsProvider.kt => ArgumentsProviderBase.kt} | 0 13 files changed, 725 insertions(+), 542 deletions(-) rename lang/src/org/partiql/lang/{Exceptions.kt => SqlException.kt} (87%) rename lang/src/org/partiql/lang/ast/passes/{V0AstSserializer.kt => V0AstSerializer.kt} (100%) rename lang/src/org/partiql/lang/mappers/{Exceptions.kt => TypeNotFoundException.kt} (87%) rename lang/test/org/partiql/lang/syntax/{SqlParserSelectJoinTests.kt => SqlParserJoinTest.kt} (87%) rename lang/test/org/partiql/lang/util/{ArgumentsProvider.kt => ArgumentsProviderBase.kt} (100%) diff --git a/lang/src/org/partiql/lang/Exceptions.kt b/lang/src/org/partiql/lang/SqlException.kt similarity index 87% rename from lang/src/org/partiql/lang/Exceptions.kt rename to lang/src/org/partiql/lang/SqlException.kt index 5ad0a7cf0e..a0843ab4cf 100644 --- a/lang/src/org/partiql/lang/Exceptions.kt +++ 
b/lang/src/org/partiql/lang/SqlException.kt @@ -40,12 +40,13 @@ import org.partiql.lang.errors.UNKNOWN * [SqlException]. This is the constructor for the second configuration explained above. * */ -open class SqlException(override var message: String, - val errorCode: ErrorCode, - val errorContext: PropertyValueMap? = null, - cause: Throwable? = null) - : RuntimeException(message, cause) { - +open class SqlException( + override var message: String, + val errorCode: ErrorCode, + val errorContext: PropertyValueMap? = null, + cause: Throwable? = null +) : + RuntimeException(message, cause) { /** * Given the [errorCode], error context as a [propertyValueMap] and optional [cause] creates an @@ -57,7 +58,7 @@ open class SqlException(override var message: String, * @param cause for this exception */ constructor(errorCode: ErrorCode, propertyValueMap: PropertyValueMap, cause: Throwable? = null) : - this("",errorCode, propertyValueMap, cause) + this("", errorCode, propertyValueMap, cause) /** * Auto-generated message has the structure @@ -77,10 +78,10 @@ open class SqlException(override var message: String, * */ fun generateMessage(): String = - "${errorCategory(errorCode)}: ${errorLocation(errorContext)}: ${errorMessage(errorCode, errorContext)}" + "${errorCategory(errorCode)}: ${errorLocation(errorContext)}: ${errorMessage(errorCode, errorContext)}" - private fun errorMessage(errorCode: ErrorCode?, propertyValueMap: PropertyValueMap?): String = - errorCode?.getErrorMessage(propertyValueMap) ?: UNKNOWN + private fun errorMessage(errorCode: ErrorCode?, propertyValueMap: PropertyValueMap?): String = + errorCode?.getErrorMessage(propertyValueMap) ?: UNKNOWN private fun errorLocation(propertyValueMap: PropertyValueMap?): String { val lineNo = propertyValueMap?.get(Property.LINE_NUMBER)?.longValue() @@ -90,7 +91,7 @@ open class SqlException(override var message: String, } private fun errorCategory(errorCode: ErrorCode?): String = - errorCode?.errorCategory() ?: UNKNOWN + errorCode?.errorCategory() ?: UNKNOWN override fun toString(): String = when (this.message.isNotBlank()) { @@ -109,4 +110,3 @@ open class SqlException(override var message: String, } } } - diff --git a/lang/src/org/partiql/lang/ast/passes/V0AstSserializer.kt b/lang/src/org/partiql/lang/ast/passes/V0AstSerializer.kt similarity index 100% rename from lang/src/org/partiql/lang/ast/passes/V0AstSserializer.kt rename to lang/src/org/partiql/lang/ast/passes/V0AstSerializer.kt diff --git a/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt b/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt index 6169f96c99..4828544e89 100644 --- a/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt +++ b/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt @@ -16,7 +16,11 @@ package org.partiql.lang.eval import org.partiql.lang.errors.ErrorCode import org.partiql.lang.eval.NaturalExprValueComparators.NullOrder -import org.partiql.lang.util.* +import org.partiql.lang.util.compareTo +import org.partiql.lang.util.isNaN +import org.partiql.lang.util.isNegInf +import org.partiql.lang.util.isPosInf +import org.partiql.lang.util.isZero /** * Provides a total, natural ordering over [ExprValue]. This ordering is consistent with @@ -73,9 +77,11 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa * the left type is the specified condition and the right type isn't this implies * that the left value is less than the right and vice versa. 
*/ - private inline fun handle(leftTypeCond: Boolean, - rightTypeCond: Boolean, - sameTypeHandler: () -> Int): Int? = when { + private inline fun handle( + leftTypeCond: Boolean, + rightTypeCond: Boolean, + sameTypeHandler: () -> Int + ): Int? = when { leftTypeCond && rightTypeCond -> sameTypeHandler() leftTypeCond -> LESS @@ -83,15 +89,17 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa else -> null } - private inline fun ifCompared(value: Int?, handler: (Int) -> Unit): Unit { + private inline fun ifCompared(value: Int?, handler: (Int) -> Unit) { if (value != null) { handler(value) } } - private fun compareOrdered(left: Iterable, - right: Iterable, - comparator: Comparator): Int { + private fun compareOrdered( + left: Iterable, + right: Iterable, + comparator: Comparator + ): Int { val lIter = left.iterator() val rIter = right.iterator() @@ -111,9 +119,11 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa } } - private fun compareUnordered(left: Iterable, - right: Iterable, - entityCmp: Comparator): Int { + private fun compareUnordered( + left: Iterable, + right: Iterable, + entityCmp: Comparator + ): Int { val pairCmp = object : Comparator> { override fun compare(o1: Pair, o2: Pair): Int { val cmp = entityCmp.compare(o1.first, o2.first) diff --git a/lang/src/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransform.kt index 6c6bcfbf09..9f434867a4 100644 --- a/lang/src/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransform.kt @@ -33,7 +33,27 @@ import org.partiql.lang.eval.ExprValueType import org.partiql.lang.eval.builtins.createBuiltinFunctionSignatures import org.partiql.lang.eval.delegate import org.partiql.lang.eval.getStartingSourceLocationMeta -import org.partiql.lang.types.* +import org.partiql.lang.types.AnyOfType +import org.partiql.lang.types.AnyType +import org.partiql.lang.types.BagType +import org.partiql.lang.types.BoolType +import org.partiql.lang.types.CollectionType +import org.partiql.lang.types.DecimalType +import org.partiql.lang.types.FloatType +import org.partiql.lang.types.FunctionSignature +import org.partiql.lang.types.IntType +import org.partiql.lang.types.ListType +import org.partiql.lang.types.MissingType +import org.partiql.lang.types.NullType +import org.partiql.lang.types.NumberConstraint +import org.partiql.lang.types.SexpType +import org.partiql.lang.types.SingleType +import org.partiql.lang.types.StaticType +import org.partiql.lang.types.StringType +import org.partiql.lang.types.StructType +import org.partiql.lang.types.TypedOpParameter +import org.partiql.lang.types.UnknownArguments +import org.partiql.lang.types.toTypedOpParameter import org.partiql.lang.util.cartesianProduct /** @@ -79,8 +99,10 @@ internal class StaticTypeInferenceVisitorTransform( * - 1 is the top-most statement with a `FROM` clause (i.e. select-from-where or DML operation), * - Values > 1 are for each subsequent level of nested sub-query. */ - private inner class VisitorTransform(private val parentEnv: Bindings, - private val currentScopeDepth: Int) : VisitorTransformBase() { + private inner class VisitorTransform( + private val parentEnv: Bindings, + private val currentScopeDepth: Int + ) : VisitorTransformBase() { /** Specifies the current scope search order--default is LEXICAL. 
*/ private var scopeOrder = ScopeSearchOrder.LEXICAL @@ -201,8 +223,10 @@ internal class StaticTypeInferenceVisitorTransform( private fun addLocal(name: String, type: StaticType) { val existing = localsOnlyEnv[BindingName(name, BindingCase.INSENSITIVE)] if (existing != null) { - TODO("A variable named '$name' was already defined in this scope. " + - "This wouldn't be the case if StaticTypeVisitorTransform was executed first.") + TODO( + "A variable named '$name' was already defined in this scope. " + + "This wouldn't be the case if StaticTypeVisitorTransform was executed first." + ) } localsMap[name] = type // this requires a new instance because of how [Bindings.ofMap] works @@ -237,8 +261,10 @@ internal class StaticTypeInferenceVisitorTransform( override fun transformExprId(node: PartiqlAst.Expr.Id): PartiqlAst.Expr { val bindingName = BindingName(node.name.text, node.case.toBindingCase()) - val foundType = findBind(bindingName, node.qualifier) ?: error("No such variable named ${node.name.text}. " + - "This wouldn't be the case if StaticTypeVisitorTransform was executed first.") + val foundType = findBind(bindingName, node.qualifier) ?: error( + "No such variable named ${node.name.text}. " + + "This wouldn't be the case if StaticTypeVisitorTransform was executed first." + ) return node.withStaticType(foundType) } @@ -403,8 +429,8 @@ internal class StaticTypeInferenceVisitorTransform( val nAry = super.transformExprNot(node) as PartiqlAst.Expr.Not val args = listOf(nAry.expr) return if (hasValidOperandTypes(args, { it is BoolType }, "NOT", nAry.metas)) { - transformNAry(nAry, args) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLogicalOperations) } - } else { + transformNAry(nAry, args) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLogicalOperations) } + } else { nAry.withStaticType(StaticType.BOOL) } } @@ -412,8 +438,8 @@ internal class StaticTypeInferenceVisitorTransform( override fun transformExprAnd(node: PartiqlAst.Expr.And): PartiqlAst.Expr { val nAry = super.transformExprAnd(node) as PartiqlAst.Expr.And return if (hasValidOperandTypes(nAry.operands, { it is BoolType }, "AND", nAry.metas)) { - transformNAry(nAry, nAry.operands) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLogicalOperations) } - } else { + transformNAry(nAry, nAry.operands) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLogicalOperations) } + } else { nAry.withStaticType(StaticType.BOOL) } } @@ -421,8 +447,8 @@ internal class StaticTypeInferenceVisitorTransform( override fun transformExprOr(node: PartiqlAst.Expr.Or): PartiqlAst.Expr { val nAry = super.transformExprOr(node) as PartiqlAst.Expr.Or return if (hasValidOperandTypes(nAry.operands, { it is BoolType }, "OR", nAry.metas)) { - transformNAry(nAry, nAry.operands) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLogicalOperations) } - } else { + transformNAry(nAry, nAry.operands) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLogicalOperations) } + } else { nAry.withStaticType(StaticType.BOOL) } } @@ -461,8 +487,8 @@ internal class StaticTypeInferenceVisitorTransform( // check if any non-unknown operand has no text type. 
if so, then data type mismatch return if (hasValidOperandTypes(nAry.operands, { it.isText() }, "||", nAry.metas)) { - transformNAry(nAry, nAry.operands) { recurseForNAryOperations(nAry, operandsTypes, ::getTypeForNAryStringConcat) } - } else { + transformNAry(nAry, nAry.operands) { recurseForNAryOperations(nAry, operandsTypes, ::getTypeForNAryStringConcat) } + } else { nAry.withStaticType(StaticType.STRING) } } @@ -474,8 +500,8 @@ internal class StaticTypeInferenceVisitorTransform( // check if any non-unknown operand has no text type. if so, then data type mismatch return if (hasValidOperandTypes(args, { it.isText() }, "LIKE", nAry.metas)) { - transformNAry(nAry, args) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLike) } - } else { + transformNAry(nAry, args) { recurseForNAryOperations(nAry, it, ::getTypeForNAryLike) } + } else { nAry.withStaticType(StaticType.BOOL) } } @@ -507,7 +533,8 @@ internal class StaticTypeInferenceVisitorTransform( operands: List, operandTypeValidator: (StaticType) -> Boolean, op: String, - metas: MetaContainer): Boolean { + metas: MetaContainer + ): Boolean { val operandsTypes = operands.map { it.getStaticType() } var hasValidOperands = true @@ -533,20 +560,20 @@ internal class StaticTypeInferenceVisitorTransform( // check if [argType] could be a numeric type return if (hasValidOperandTypes(operands, { it.isNumeric() }, op, expr.metas)) { - val allTypes = argType.allTypes - val possibleReturnTypes = allTypes.map { st -> - when (st) { - is IntType, is FloatType, is DecimalType -> st - is NullType -> StaticType.NULL - else -> StaticType.MISSING - } - }.distinct() - - when (possibleReturnTypes.size) { - 1 -> possibleReturnTypes.single() - else -> StaticType.unionOf(*possibleReturnTypes.toTypedArray()) + val allTypes = argType.allTypes + val possibleReturnTypes = allTypes.map { st -> + when (st) { + is IntType, is FloatType, is DecimalType -> st + is NullType -> StaticType.NULL + else -> StaticType.MISSING } - } else { + }.distinct() + + when (possibleReturnTypes.size) { + 1 -> possibleReturnTypes.single() + else -> StaticType.unionOf(*possibleReturnTypes.toTypedArray()) + } + } else { // continuation type of all numeric types to prevent incompatible types and unknown errors from propagating StaticType.unionOf(StaticType.ALL_TYPES.filter { it.isNumeric() }.toSet()) } @@ -555,34 +582,34 @@ internal class StaticTypeInferenceVisitorTransform( private fun computeReturnTypeForArithmeticNAry(expr: PartiqlAst.Expr, operands: List, op: String): StaticType { // check if all operands could be a numeric type return if (hasValidOperandTypes(operands, { it.isNumeric() }, op, expr.metas)) { - operands.map { it.getStaticType() }.reduce { lastType, currentType -> - when { - lastType is MissingType || currentType is MissingType -> StaticType.MISSING - lastType is NullType || currentType is NullType -> StaticType.NULL - else -> { - val leftTypes = lastType.allTypes - val rightTypes = currentType.allTypes - - val possibleResultTypes: List = - leftTypes.flatMap { type1 -> - rightTypes.map { type2 -> - computeBinaryArithmeticResultType(type1, type2) - } - }.distinct() - - when (possibleResultTypes.size) { - 0 -> error("We always expect there to be at least one possible result type, even if is MISSING") - 1 -> { - // returning StaticType.MISSING from this branch is an error condition because the - // arithmetic operation can *never* succeed. 
- possibleResultTypes.first() + operands.map { it.getStaticType() }.reduce { lastType, currentType -> + when { + lastType is MissingType || currentType is MissingType -> StaticType.MISSING + lastType is NullType || currentType is NullType -> StaticType.NULL + else -> { + val leftTypes = lastType.allTypes + val rightTypes = currentType.allTypes + + val possibleResultTypes: List = + leftTypes.flatMap { type1 -> + rightTypes.map { type2 -> + computeBinaryArithmeticResultType(type1, type2) } - else -> AnyOfType(possibleResultTypes.toSet()) + }.distinct() + + when (possibleResultTypes.size) { + 0 -> error("We always expect there to be at least one possible result type, even if is MISSING") + 1 -> { + // returning StaticType.MISSING from this branch is an error condition because the + // arithmetic operation can *never* succeed. + possibleResultTypes.first() } + else -> AnyOfType(possibleResultTypes.toSet()) } } } - } else { + } + } else { // continuation type of all numeric types to prevent incompatible types and unknown errors from propagating StaticType.unionOf(StaticType.ALL_TYPES.filter { it.isNumeric() }.toSet()) } @@ -640,20 +667,20 @@ internal class StaticTypeInferenceVisitorTransform( } }.distinct() - return when(finalTypes.size) { + return when (finalTypes.size) { 1 -> finalTypes.first() else -> StaticType.unionOf(*finalTypes.toTypedArray()) } } private fun computeReturnTypeForBinaryIn(left: StaticType, right: StaticType): StaticType = - when(right) { - is NullType -> when(left) { + when (right) { + is NullType -> when (left) { is MissingType -> StaticType.MISSING else -> StaticType.NULL } is MissingType -> StaticType.MISSING - is CollectionType -> when(left) { + is CollectionType -> when (left) { is NullType -> StaticType.NULL is MissingType -> StaticType.MISSING else -> { @@ -671,11 +698,10 @@ internal class StaticTypeInferenceVisitorTransform( StaticType.unionOf(possibleTypes).flatten() } } - else -> when(left) { + else -> when (left) { is NullType -> StaticType.unionOf(StaticType.NULL, StaticType.MISSING) else -> StaticType.MISSING } - } /** @@ -861,7 +887,7 @@ internal class StaticTypeInferenceVisitorTransform( /** * Computes the return type of the function call based on the [FunctionSignature.unknownArguments] */ - private fun computeReturnTypeForFunctionCall(signature: FunctionSignature, arguments: List, functionMetas: MetaContainer) : StaticType { + private fun computeReturnTypeForFunctionCall(signature: FunctionSignature, arguments: List, functionMetas: MetaContainer): StaticType { // Check for all the possible invalid number of argument cases. Throws an error if invalid number of arguments found. 
if (!signature.arity.contains(arguments.size)) { handleIncorrectNumberOfArgumentsToFunctionCallError(signature.name, signature.arity, arguments.size, functionMetas.getSourceLocation()) @@ -876,7 +902,7 @@ internal class StaticTypeInferenceVisitorTransform( /** * Computes return type for functions with [FunctionSignature.unknownArguments] as [UnknownArguments.PASS_THRU] */ - private fun returnTypeForPassThruFunction(signature: FunctionSignature, arguments: List) : StaticType { + private fun returnTypeForPassThruFunction(signature: FunctionSignature, arguments: List): StaticType { return when { matchesAllArguments(arguments, signature) -> signature.returnType matchesAtLeastOneArgument(arguments, signature) -> StaticType.unionOf(signature.returnType, StaticType.MISSING) @@ -900,8 +926,7 @@ internal class StaticTypeInferenceVisitorTransform( if (actualType.isUnknown()) { handleNullOrMissingFunctionArgument(functionName, actualExpr.metas.getSourceLocation()) allArgsValid = false - } - else { + } else { val actualNonUnknownType = actualType.filterNullMissing() if (actualNonUnknownType.typeDomain.intersect(expectedType.typeDomain).isEmpty()) { handleInvalidArgumentTypeForFunction( @@ -920,7 +945,7 @@ internal class StaticTypeInferenceVisitorTransform( /** * Computes return type for functions with [FunctionSignature.unknownArguments] as [UnknownArguments.PROPAGATE] */ - private fun returnTypeForPropagatingFunction(signature: FunctionSignature, arguments: List) : StaticType { + private fun returnTypeForPropagatingFunction(signature: FunctionSignature, arguments: List): StaticType { val requiredArgs = arguments.zip(signature.requiredParameters) val restOfArgs = arguments.drop(signature.requiredParameters.size) @@ -976,24 +1001,26 @@ internal class StaticTypeInferenceVisitorTransform( actual.getStaticType().typeDomain.intersect(expected.typeDomain).isNotEmpty() } - val optionalArgumentMatches = when(signature.optionalParameter) { + val optionalArgumentMatches = when (signature.optionalParameter) { null -> true - else -> arguments - .getOrNull(signature.requiredParameters.size) - ?.getStaticType()?.typeDomain - ?.intersect(signature.optionalParameter.typeDomain) - ?.isNotEmpty() - ?: true + else -> + arguments + .getOrNull(signature.requiredParameters.size) + ?.getStaticType()?.typeDomain + ?.intersect(signature.optionalParameter.typeDomain) + ?.isNotEmpty() + ?: true } - val variadicArgumentsMatch = when(signature.variadicParameter) { + val variadicArgumentsMatch = when (signature.variadicParameter) { null -> true - else -> arguments - .drop(signature.requiredParameters.size) - .all { arg -> - val argType = arg.getStaticType() - argType.typeDomain.intersect(signature.variadicParameter.type.typeDomain).isNotEmpty() - } + else -> + arguments + .drop(signature.requiredParameters.size) + .all { arg -> + val argType = arg.getStaticType() + argType.typeDomain.intersect(signature.variadicParameter.type.typeDomain).isNotEmpty() + } } return requiredArgumentsMatch && optionalArgumentMatches && variadicArgumentsMatch @@ -1008,15 +1035,17 @@ internal class StaticTypeInferenceVisitorTransform( // Checks if the actual StaticType is subtype of expected StaticType ( filtering the null/missing for PROPAGATING functions fun isSubType(actual: StaticType, expected: StaticType) = when (signature.unknownArguments) { - UnknownArguments.PROPAGATE -> when(actual) { - is AnyOfType -> actual.copy(types = actual.types.filter { - !it.isNullOrMissing() - }.toSet()) + UnknownArguments.PROPAGATE -> when (actual) { + is AnyOfType 
-> actual.copy( + types = actual.types.filter { + !it.isNullOrMissing() + }.toSet() + ) else -> actual } UnknownArguments.PASS_THRU -> actual } - .isSubTypeOf(expected) + .isSubTypeOf(expected) val requiredArgumentsMatch = arguments .zip(signature.requiredParameters) @@ -1025,7 +1054,7 @@ internal class StaticTypeInferenceVisitorTransform( isSubType(st, expected) } - val optionalArgumentMatches = when(signature.optionalParameter) { + val optionalArgumentMatches = when (signature.optionalParameter) { null -> true else -> { val st = arguments @@ -1038,16 +1067,17 @@ internal class StaticTypeInferenceVisitorTransform( } } - val variadicArgumentsMatch = when(signature.variadicParameter) { + val variadicArgumentsMatch = when (signature.variadicParameter) { null -> true - else -> arguments - // We make an assumption here that either the optional or the variadic arguments are passed to the function. - // This "drop" may not hold true if both, optional and variadic arguments, are allowed at the same time. - .drop(signature.requiredParameters.size) - .all { arg -> - val st = arg.getStaticType() - isSubType(st, signature.variadicParameter.type) - } + else -> + arguments + // We make an assumption here that either the optional or the variadic arguments are passed to the function. + // This "drop" may not hold true if both, optional and variadic arguments, are allowed at the same time. + .drop(signature.requiredParameters.size) + .all { arg -> + val st = arg.getStaticType() + isSubType(st, signature.variadicParameter.type) + } } return requiredArgumentsMatch && optionalArgumentMatches && variadicArgumentsMatch @@ -1337,9 +1367,7 @@ internal class StaticTypeInferenceVisitorTransform( if (exprType.isUnknown()) { handleExpressionAlwaysReturnsNullOrMissingError(expr.getStartingSourceLocationMeta()) - } - - else if (exprType.allTypes.none { it == expectedType }) { + } else if (exprType.allTypes.none { it == expectedType }) { handleIncompatibleDataTypeForExprError( expectedType = expectedType, actualType = exprType, @@ -1398,7 +1426,6 @@ internal class StaticTypeInferenceVisitorTransform( val valueType = getUnpivotValueType(fromExprType) addLocal(asSymbolicName.text, valueType) - node.atAlias?.let { val valueHasMissing = valueType.typeDomain.contains(ExprValueType.MISSING) val valueOnlyHasMissing = valueHasMissing && valueType.typeDomain.size == 1 @@ -1425,7 +1452,7 @@ internal class StaticTypeInferenceVisitorTransform( override fun transformExprPath(node: PartiqlAst.Expr.Path): PartiqlAst.Expr { val path = super.transformExprPath(node) as PartiqlAst.Expr.Path var currentType = path.root.getStaticType() - val newComponents = path.steps.map { pathComponent -> + val newComponents = path.steps.map { pathComponent -> currentType = when (pathComponent) { is PartiqlAst.PathStep.PathExpr -> inferPathComponentExprType(currentType, pathComponent) is PartiqlAst.PathStep.PathUnpivot -> TODO("PathUnpivot is not implemented yet") @@ -1446,13 +1473,14 @@ internal class StaticTypeInferenceVisitorTransform( private fun inferPathComponentExprType( previousComponentType: StaticType, - currentPathComponent: PartiqlAst.PathStep.PathExpr): StaticType = + currentPathComponent: PartiqlAst.PathStep.PathExpr + ): StaticType = when (previousComponentType) { is AnyType -> StaticType.ANY is StructType -> inferStructLookupType(currentPathComponent, previousComponentType.fields, previousComponentType.contentClosed) is ListType, is SexpType -> { - val previous = previousComponentType as CollectionType // help Kotlin's type inference to 
be more specific + val previous = previousComponentType as CollectionType // help Kotlin's type inference to be more specific if (currentPathComponent.index.getStaticType() is IntType) { previous.elementType } else { @@ -1466,8 +1494,7 @@ internal class StaticTypeInferenceVisitorTransform( val prevTypes = previousComponentType.allTypes if (prevTypes.any { it is AnyType }) { StaticType.ANY - } - else { + } else { val staticTypes = prevTypes.map { inferPathComponentExprType(it, currentPathComponent) } AnyOfType(staticTypes.toSet()).flatten() } @@ -1480,7 +1507,8 @@ internal class StaticTypeInferenceVisitorTransform( private fun inferStructLookupType( currentPathComponent: PartiqlAst.PathStep.PathExpr, structFields: Map, - contentClosed: Boolean): StaticType = + contentClosed: Boolean + ): StaticType = when (currentPathComponent.index) { is PartiqlAst.Expr.Lit -> { if (currentPathComponent.index.value is StringElement) { @@ -1488,7 +1516,8 @@ internal class StaticTypeInferenceVisitorTransform( val caseSensitivity = currentPathComponent.case val lookupName = BindingName( currentPathComponent.index.value.stringValue, - caseSensitivity.toBindingCase()) + caseSensitivity.toBindingCase() + ) bindings[lookupName] ?: if (contentClosed) { StaticType.MISSING } else { @@ -1559,8 +1588,10 @@ internal class StaticTypeInferenceVisitorTransform( // TODO: Make the name optional in StaticType StructType(projectionFields, contentClosed) } - is PartiqlAst.Projection.ProjectStar -> error("Encountered a SelectListItemStar." + - " This wouldn't be the case if SelectStarVisitorTransform ran before this.") + is PartiqlAst.Projection.ProjectStar -> error( + "Encountered a SelectListItemStar." + + " This wouldn't be the case if SelectStarVisitorTransform ran before this." + ) is PartiqlAst.Projection.ProjectValue -> newProjection.value.getStaticType() is PartiqlAst.Projection.ProjectPivot -> TODO("PartiqlAst.Projection.ProjectPivot is not implemented yet") } diff --git a/lang/src/org/partiql/lang/mappers/IonSchemaMapper.kt b/lang/src/org/partiql/lang/mappers/IonSchemaMapper.kt index 6ea3a48014..4a7cadc1e7 100644 --- a/lang/src/org/partiql/lang/mappers/IonSchemaMapper.kt +++ b/lang/src/org/partiql/lang/mappers/IonSchemaMapper.kt @@ -3,7 +3,27 @@ package org.partiql.lang.mappers import com.amazon.ionelement.api.ionBool import com.amazon.ionelement.api.ionInt import org.partiql.ionschema.model.IonSchemaModel -import org.partiql.lang.types.* +import org.partiql.lang.types.AnyOfType +import org.partiql.lang.types.AnyType +import org.partiql.lang.types.BagType +import org.partiql.lang.types.BlobType +import org.partiql.lang.types.BoolType +import org.partiql.lang.types.ClobType +import org.partiql.lang.types.DateType +import org.partiql.lang.types.DecimalType +import org.partiql.lang.types.FloatType +import org.partiql.lang.types.IntType +import org.partiql.lang.types.ListType +import org.partiql.lang.types.MissingType +import org.partiql.lang.types.NullType +import org.partiql.lang.types.NumberConstraint +import org.partiql.lang.types.SexpType +import org.partiql.lang.types.StaticType +import org.partiql.lang.types.StringType +import org.partiql.lang.types.StructType +import org.partiql.lang.types.SymbolType +import org.partiql.lang.types.TimeType +import org.partiql.lang.types.TimestampType internal const val ISL_META_KEY = "ISL" @@ -27,13 +47,13 @@ class IonSchemaMapper(private val staticType: StaticType) { return IonSchemaModel.build { schema( // header - listOf(headerStatement(openFieldList(), 
importList(import("partiql.isl")))) - // other top-level type statements - + remaining.mapValues { typeStatement(it.value) }.values.toList() - // type statement for `topLevelTypeName` - + typeStatement(staticType.toTypeDefinition(topLevelTypeName, typeDefName = topLevelTypeName)) - // footer - + footerStatement(openFieldList()) + listOf(headerStatement(openFieldList(), importList(import("partiql.isl")))) + + // other top-level type statements + remaining.mapValues { typeStatement(it.value) }.values.toList() + + // type statement for `topLevelTypeName` + typeStatement(staticType.toTypeDefinition(topLevelTypeName, typeDefName = topLevelTypeName)) + + // footer + footerStatement(openFieldList()) ) } } @@ -42,11 +62,16 @@ class IonSchemaMapper(private val staticType: StaticType) { * Creates a top-level or an inline ISL type definition */ private fun StaticType.toTypeDefinition(topLevelTypeName: String, typeDefName: String? = null, constraints: Set = emptySet()): IonSchemaModel.TypeDefinition = - IonSchemaModel.build { typeDefinition(typeDefName, constraints = when { - constraints.isEmpty() -> constraintList(getConstraints(topLevelTypeName, typeDefName).toList()) - else -> constraintList(constraints.toList()) - })} - + IonSchemaModel.build { + typeDefinition( + typeDefName, + constraints = when { + constraints.isEmpty() -> constraintList(getConstraints(topLevelTypeName, typeDefName).toList()) + else -> constraintList(constraints.toList()) + } + ) + } + /** * Returns a set of all constraints for the StaticType */ @@ -81,7 +106,7 @@ class IonSchemaMapper(private val staticType: StaticType) { // Examples: type:int, type:{type:list,element:int}, type:custom // We only create type constraint here val isNullable = ionBool(isNullable(this) || nullable) - + // Get type definitions stored in metas val typeDefsFromMetas = metas[ISL_META_KEY] as? List // Get type definition for `typeDefName` if exists. Note that `typeDefName` may be null but there may still be a valid type definition. 
@@ -140,11 +165,13 @@ class IonSchemaMapper(private val staticType: StaticType) { is IonSchemaModel.TypeReference.NamedType -> this is IonSchemaModel.TypeReference.InlineType -> when (this.type.constraints.items.size) { 1 -> when (val constraint = this.type.constraints.items.first()) { - is IonSchemaModel.Constraint.TypeConstraint -> when(val typeFromConstraint = constraint.type) { - is IonSchemaModel.TypeReference.NamedType -> IonSchemaModel.build { namedType( - typeFromConstraint.name.text, - ionBool(this@flatten.nullable.booleanValue || typeFromConstraint.nullable.booleanValue) - ) } + is IonSchemaModel.Constraint.TypeConstraint -> when (val typeFromConstraint = constraint.type) { + is IonSchemaModel.TypeReference.NamedType -> IonSchemaModel.build { + namedType( + typeFromConstraint.name.text, + ionBool(this@flatten.nullable.booleanValue || typeFromConstraint.nullable.booleanValue) + ) + } is IonSchemaModel.TypeReference.InlineType -> IonSchemaModel.build { inlineType( typeFromConstraint.type, @@ -179,8 +206,12 @@ class IonSchemaMapper(private val staticType: StaticType) { } else -> { // Examples: any_of:[int, string], any_of:[int, custom], any_of:[custom1, custom2] - IonSchemaModel.build { anyOf(nonNullableTypes.map { - it.toTypeReference(topLevelTypeName, nullable) }) + IonSchemaModel.build { + anyOf( + nonNullableTypes.map { + it.toTypeReference(topLevelTypeName, nullable) + } + ) } } } @@ -205,7 +236,7 @@ class IonSchemaMapper(private val staticType: StaticType) { private fun StaticType.getOtherConstraints(topLevelTypeName: String, typeDefName: String? = null): Set { // Get type definitions from metas, if present val typeDefsFromMetas = metas[ISL_META_KEY] as? List ?: listOf() - + // If there are multiple type definitions, only consider constraints for the relevant one // The type def we are interested in has name as `typeDefName` (which may be null, for instance, if we are getting constraints for a struct field) // Once the correct type def is identified, get all constraints excluding @@ -227,10 +258,14 @@ class IonSchemaMapper(private val staticType: StaticType) { codepointLength(equalsNumber(ionInt(lengthConstraint.length.value.toLong()))) } is NumberConstraint.UpTo -> IonSchemaModel.build { - codepointLength(equalsRange(numberRange( - inclusive(ionInt(0)), - inclusive(ionInt(lengthConstraint.length.value.toLong())) - ))) + codepointLength( + equalsRange( + numberRange( + inclusive(ionInt(0)), + inclusive(ionInt(lengthConstraint.length.value.toLong())) + ) + ) + ) } } } @@ -241,12 +276,20 @@ class IonSchemaMapper(private val staticType: StaticType) { IntType.IntRangeConstraint.UNCONSTRAINED -> emptyList() else -> { constraintsFromISL = constraintsFromISL.filterNot { it is IonSchemaModel.Constraint.ValidValues } - listOf(IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(constraint.validRange.first)), - inclusive(ionInt(constraint.validRange.last)) - )))) - }) + listOf( + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(constraint.validRange.first)), + inclusive(ionInt(constraint.validRange.last)) + ) + ) + ) + ) + } + ) } } } @@ -373,14 +416,18 @@ class IonSchemaMapper(private val staticType: StaticType) { } else -> { when (val reference = value.toTypeReference(topLevelTypeName)) { - is IonSchemaModel.TypeReference.NamedType -> inlineType(typeDefinition( - null, - constraintList(typeConstraint(reference), occurs(occursRequired()))), + is IonSchemaModel.TypeReference.NamedType -> 
inlineType( + typeDefinition( + null, + constraintList(typeConstraint(reference), occurs(occursRequired())) + ), reference.nullable ) - is IonSchemaModel.TypeReference.InlineType -> inlineType(typeDefinition( - null, - constraintList(reference.type.constraints.items + occurs(occursRequired()))), + is IonSchemaModel.TypeReference.InlineType -> inlineType( + typeDefinition( + null, + constraintList(reference.type.constraints.items + occurs(occursRequired())) + ), reference.nullable ) is IonSchemaModel.TypeReference.ImportedType -> TODO("Imported types are not supported yet") @@ -429,9 +476,9 @@ private fun IonSchemaModel.TypeDefinition.getTypeConstraintName(): String? { * A top-level type is found when ISL type definition in metas has "name" attribute present - this is only possible if * it was a top-level type in original ISL (ISL used to create the StaticType instance) */ -private fun StaticType.addTopLevelTypesFromMetas() : Map { +private fun StaticType.addTopLevelTypesFromMetas(): Map { val typeDefs = this.metas[ISL_META_KEY] as? List ?: emptyList() - return typeDefs.filter {it.name != null }.map { it.name!!.text to it }.toMap() + return typeDefs.filter { it.name != null }.map { it.name!!.text to it }.toMap() } /** @@ -500,7 +547,7 @@ fun StaticType.getBaseTypeName(): String = when (this) { } } is DateType -> "date" - is TimeType -> when(withTimeZone) { + is TimeType -> when (withTimeZone) { false -> "time" true -> "time_with_time_zone" } diff --git a/lang/src/org/partiql/lang/mappers/Exceptions.kt b/lang/src/org/partiql/lang/mappers/TypeNotFoundException.kt similarity index 87% rename from lang/src/org/partiql/lang/mappers/Exceptions.kt rename to lang/src/org/partiql/lang/mappers/TypeNotFoundException.kt index 5349f314f9..6af09ce720 100644 --- a/lang/src/org/partiql/lang/mappers/Exceptions.kt +++ b/lang/src/org/partiql/lang/mappers/TypeNotFoundException.kt @@ -10,4 +10,4 @@ package org.partiql.lang.mappers * @param name the type name corresponding to the ISL Type * @param message the message for this exception */ -class TypeNotFoundException(name: String, message: String = "Type not found") : RuntimeException("$message : $name") \ No newline at end of file +class TypeNotFoundException(name: String, message: String = "Type not found") : RuntimeException("$message : $name") diff --git a/lang/test/org/partiql/lang/eval/CastTestBase.kt b/lang/test/org/partiql/lang/eval/CastTestBase.kt index e02d2ae6c3..0b4963f763 100644 --- a/lang/test/org/partiql/lang/eval/CastTestBase.kt +++ b/lang/test/org/partiql/lang/eval/CastTestBase.kt @@ -17,7 +17,11 @@ import org.partiql.lang.eval.ExprValueType.SEXP import org.partiql.lang.eval.ExprValueType.STRUCT import org.partiql.lang.eval.ExprValueType.TIMESTAMP import org.partiql.lang.syntax.ParserException -import org.partiql.lang.util.* +import org.partiql.lang.util.getOffsetHHmm +import org.partiql.lang.util.honorTypedOpParameters +import org.partiql.lang.util.legacyCastBehavior +import org.partiql.lang.util.legacyTypingMode +import org.partiql.lang.util.permissiveTypingMode import java.time.ZoneOffset /** @@ -65,13 +69,15 @@ abstract class CastTestBase : EvaluatorTestBase() { * @param additionalAssertBlock The additional block of assertions on the resulting value. Only valid for * non-error cases and defaults to no-op. 
*/ - data class CastCase(val funcName: String, - val source: String, - val type: String, - val expected: String?, - val expectedErrorCode: ErrorCode?, - val quality: CastQualityStatus?, - val additionalAssertBlock: AssertExprValue.() -> Unit = { }) { + data class CastCase( + val funcName: String, + val source: String, + val type: String, + val expected: String?, + val expectedErrorCode: ErrorCode?, + val quality: CastQualityStatus?, + val additionalAssertBlock: AssertExprValue.() -> Unit = { } + ) { val expression = when (funcName.toUpperCase()) { "IS" -> "($source) IS $type" else -> "$funcName($source AS $type)" @@ -149,10 +155,12 @@ abstract class CastTestBase : EvaluatorTestBase() { * @param compileOptionBlock The optional lambda with a receiver to a [CompileOptions.Builder] to * configure it. */ - data class ConfiguredCastCase(val castCase: CastCase, - val description: String = "", - val configurePipeline: CompilerPipeline.Builder.() -> Unit = {}, - val compileOptionBlock: CompileOptions.Builder.() -> Unit = {}) { + data class ConfiguredCastCase( + val castCase: CastCase, + val description: String = "", + val configurePipeline: CompilerPipeline.Builder.() -> Unit = {}, + val compileOptionBlock: CompileOptions.Builder.() -> Unit = {} + ) { private val additionalDescription = when (description) { "" -> "" else -> " - $description" @@ -214,18 +222,22 @@ abstract class CastTestBase : EvaluatorTestBase() { companion object : EvaluatorTestBase() { /** Partial application of the source expression and the expected Ion value without type. Assumes [Implemented] logic*/ - fun case(source: String, - expected: String?, - quality: CastQuality, - additionalAssertBlock: AssertExprValue.() -> Unit = { }): (String) -> CastCase = { + fun case( + source: String, + expected: String?, + quality: CastQuality, + additionalAssertBlock: AssertExprValue.() -> Unit = { } + ): (String) -> CastCase = { CastCase("CAST", source, it, expected, null, Implemented(quality), additionalAssertBlock) } /** Partial application of the source expression and the expected Ion value without type. 
*/ - fun case(source: String, - expected: String?, - qualityStatus: CastQualityStatus, - additionalAssertBlock: AssertExprValue.() -> Unit = { }): (String) -> CastCase = { + fun case( + source: String, + expected: String?, + qualityStatus: CastQualityStatus, + additionalAssertBlock: AssertExprValue.() -> Unit = { } + ): (String) -> CastCase = { CastCase("CAST", source, it, expected, null, qualityStatus, additionalAssertBlock) } @@ -442,8 +454,8 @@ abstract class CastTestBase : EvaluatorTestBase() { // numbers case("5", "5d0", CastQuality.LOSSLESS), case("5 ", "5d0", CastQuality.LOSSLESS), - case("`0e0`", "0.", CastQuality.LOSSLESS), // TODO formalize this behavior - case("`1e0`", "1.", CastQuality.LOSSLESS), // TODO formalize this behavior + case("`0e0`", "0.", CastQuality.LOSSLESS), // TODO formalize this behavior + case("`1e0`", "1.", CastQuality.LOSSLESS), // TODO formalize this behavior case("1.1", "1.1d0", CastQuality.LOSSLESS), case("-20.1", "-20.1d0", CastQuality.LOSSLESS), // timestamp @@ -606,9 +618,9 @@ abstract class CastTestBase : EvaluatorTestBase() { case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), @@ -650,9 +662,9 @@ abstract class CastTestBase : EvaluatorTestBase() { case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), @@ -739,8 +751,8 @@ abstract class CastTestBase : EvaluatorTestBase() { case("""`{{"1.0"}}`""", """{{MS4w}}""", CastQuality.LOSSLESS), case("""`{{"2e10"}}`""", """{{MmUxMA==}}""", CastQuality.LOSSLESS), case("`{{}}`", """{{}}""", CastQuality.LOSSLESS), - case("`{{MA==}}`", """{{MA==}}""", CastQuality.LOSSLESS), // 0 - case("`{{MS4w}}`", """{{MS4w}}""", CastQuality.LOSSLESS), // 1.0 + case("`{{MA==}}`", """{{MA==}}""", CastQuality.LOSSLESS), // 0 + case("`{{MS4w}}`", """{{MS4w}}""", CastQuality.LOSSLESS), // 1.0 case("`{{MmUxMA==}}`", """{{MmUxMA==}}""", CastQuality.LOSSLESS), // 2e10 // list case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), @@ -800,9 +812,9 @@ abstract class CastTestBase : EvaluatorTestBase() { case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST), case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", "[]", CastQuality.LOSSLESS), // TODO bag verification - case("<<`14d0`>>", "[14d0]", CastQuality.LOSSLESS), // TODO bag verification - case("<<`20d0`>>", "[20d0]", CastQuality.LOSSLESS) // TODO bag verification + case("<<>>", 
"[]", CastQuality.LOSSLESS), // TODO bag verification + case("<<`14d0`>>", "[14d0]", CastQuality.LOSSLESS), // TODO bag verification + case("<<`20d0`>>", "[20d0]", CastQuality.LOSSLESS) // TODO bag verification ).types(ExprValueType.LIST.sqlTextNames), listOf( // booleans @@ -919,22 +931,22 @@ abstract class CastTestBase : EvaluatorTestBase() { case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list - case("`[]`", "[]", CastQuality.LOSSLESS), // TODO bag verification + case("`[]`", "[]", CastQuality.LOSSLESS), // TODO bag verification case("['hello']", "[\"hello\"]", CastQuality.LOSSLESS), // TODO bag verification case("`[-2d0, 0d0]`", "[-2d0, 0d0]", CastQuality.LOSSLESS), // TODO bag verification // sexp - case("`()`", "[]", CastQuality.LOSSLESS), // TODO bag verification - case("`(1d0)`", "[1d0]", CastQuality.LOSSLESS), // TODO bag verification - case("`(0d0)`", "[0d0]", CastQuality.LOSSLESS), // TODO bag verification + case("`()`", "[]", CastQuality.LOSSLESS), // TODO bag verification + case("`(1d0)`", "[1d0]", CastQuality.LOSSLESS), // TODO bag verification + case("`(0d0)`", "[0d0]", CastQuality.LOSSLESS), // TODO bag verification // struct case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), case("{}", ErrorCode.EVALUATOR_INVALID_CAST), case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST), case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", "[]", CastQuality.LOSSLESS), // TODO bag verification - case("<<`14d0`>>", "[14d0]", CastQuality.LOSSLESS), // TODO bag verification - case("<<`20d0`>>", "[20d0]", CastQuality.LOSSLESS) // TODO bag verification + case("<<>>", "[]", CastQuality.LOSSLESS), // TODO bag verification + case("<<`14d0`>>", "[14d0]", CastQuality.LOSSLESS), // TODO bag verification + case("<<`20d0`>>", "[20d0]", CastQuality.LOSSLESS) // TODO bag verification ).types(ExprValueType.BAG.sqlTextNames) ).flatten() @@ -962,9 +974,9 @@ abstract class CastTestBase : EvaluatorTestBase() { case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), @@ -1041,7 +1053,7 @@ abstract class CastTestBase : EvaluatorTestBase() { case("'123'", "123.", CastQuality.LOSSLESS), case("'1234'", "1234.", CastQuality.LOSSLESS), case("'123.45'", "123.45", CastQuality.LOSSLESS) - ).types(ExprValueType.DECIMAL.sqlTextNames.map {"${it}(3)"}), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(3)" }), // DECIMAL(5,2) ; LEGACY mode does not respect DECIMAL's precison or scale listOf( case("12", "12.", CastQuality.LOSSLESS), @@ -1054,17 +1066,17 @@ abstract class CastTestBase : EvaluatorTestBase() { case("'1234'", "1234.", CastQuality.LOSSLESS), case("'123.45'", "123.45", CastQuality.LOSSLESS), case("'123.459'", "123.459", CastQuality.LOSSLESS) - ).types(ExprValueType.DECIMAL.sqlTextNames.map {"${it}(5, 2)"}), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(5, 2)" }), // DECIMAL(4,4) ; LEGACY mode does not 
respect DECIMAL's precison or scale; precision = scale is valid here listOf( case("0.1", "1d-1", CastQuality.LOSSLESS), case("0.1234", "0.1234", CastQuality.LOSSLESS), case("0.12345", "0.12345", CastQuality.LOSSLESS) - ).types(ExprValueType.DECIMAL.sqlTextNames.map { "${it}(4,4)" }), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(4,4)" }), // DECIMAL(2, 4) ; LEGACY mode does not respect DECIMAL's precison or scale; precision < scale is valid in legacy mode listOf( case("1", "1d0", CastQuality.LOSSLESS) - ).types(ExprValueType.DECIMAL.sqlTextNames.map { "${it}(2,4)" }), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(2,4)" }), // VARCHAR(4) legacy mode doesn't care about params listOf( // from string types @@ -1122,9 +1134,9 @@ abstract class CastTestBase : EvaluatorTestBase() { case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), @@ -1201,7 +1213,7 @@ abstract class CastTestBase : EvaluatorTestBase() { case("'123'", "123.", CastQuality.LOSSLESS), case("'1234'", ErrorCode.EVALUATOR_CAST_FAILED), case("'123.45'", "123.", CastQuality.LOSSY) - ).types(ExprValueType.DECIMAL.sqlTextNames.map {"${it}(3)"}), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(3)" }), // DECIMAL(5,2) listOf( case("12", "12.00", CastQuality.LOSSLESS), @@ -1214,17 +1226,17 @@ abstract class CastTestBase : EvaluatorTestBase() { case("'1234'", ErrorCode.EVALUATOR_CAST_FAILED), case("'123.45'", "123.45", CastQuality.LOSSLESS), case("'123.459'", "123.46", CastQuality.LOSSY) - ).types(ExprValueType.DECIMAL.sqlTextNames.map {"${it}(5, 2)"}), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(5, 2)" }), // DECIMAL(4,4) precision = scale is valid in honor_params listOf( case("0.1", "1.000d-1", CastQuality.LOSSLESS), case("0.1234", "0.1234", CastQuality.LOSSLESS), case("0.12345", "0.1235", CastQuality.LOSSY) - ).types(ExprValueType.DECIMAL.sqlTextNames.map { "${it}(4,4)" }), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(4,4)" }), // DECIMAL(2, 4) is a compilation failure in this mode listOf( case("1", ErrorCode.SEMANTIC_INVALID_DECIMAL_ARGUMENTS) - ).types(ExprValueType.DECIMAL.sqlTextNames.map { "${it}(2,4)" }), + ).types(ExprValueType.DECIMAL.sqlTextNames.map { "$it(2,4)" }), // VARCHAR(4) should truncate to size <= 4 listOf( // from string types @@ -1277,18 +1289,18 @@ abstract class CastTestBase : EvaluatorTestBase() { case("TIME '23:12:12.1267-05:30'", "TIME WITH TIME ZONE", "23:12:12.1267${defaultTimezoneOffset.getOffsetHHmm()}", CastQuality.LOSSLESS), case("TIME '23:12:12.1267+05:30'", "TIME (3)", "23:12:12.127", CastQuality.LOSSY), case("TIME '23:12:12.1267-05:30'", "TIME (3) WITH TIME ZONE", "23:12:12.127${defaultTimezoneOffset.getOffsetHHmm()}", CastQuality.LOSSY), - case("TIME (3) '23:12:12.1267'", "TIME","23:12:12.127", CastQuality.LOSSLESS), - case("TIME (3) '23:12:12.1267-05:30'", "TIME","23:12:12.127", CastQuality.LOSSLESS), - case("TIME (3) '23:12:12.1267+05:30'", "TIME WITH 
TIME ZONE","23:12:12.127${defaultTimezoneOffset.getOffsetHHmm()}", CastQuality.LOSSLESS), - case("TIME (3) '23:12:12.1267-05:30'", "TIME (9)","23:12:12.127000000", CastQuality.LOSSLESS), + case("TIME (3) '23:12:12.1267'", "TIME", "23:12:12.127", CastQuality.LOSSLESS), + case("TIME (3) '23:12:12.1267-05:30'", "TIME", "23:12:12.127", CastQuality.LOSSLESS), + case("TIME (3) '23:12:12.1267+05:30'", "TIME WITH TIME ZONE", "23:12:12.127${defaultTimezoneOffset.getOffsetHHmm()}", CastQuality.LOSSLESS), + case("TIME (3) '23:12:12.1267-05:30'", "TIME (9)", "23:12:12.127000000", CastQuality.LOSSLESS), case("TIME WITH TIME ZONE '23:12:12.1267'", "TIME", "23:12:12.1267", CastQuality.LOSSLESS), case("TIME WITH TIME ZONE '23:12:12.1267-05:30'", "TIME WITH TIME ZONE", "23:12:12.1267-05:30", CastQuality.LOSSLESS), - case("TIME WITH TIME ZONE '23:12:12.1267+05:30'", "TIME (3) WITH TIME ZONE","23:12:12.127+05:30", CastQuality.LOSSY), + case("TIME WITH TIME ZONE '23:12:12.1267+05:30'", "TIME (3) WITH TIME ZONE", "23:12:12.127+05:30", CastQuality.LOSSY), case("TIME WITH TIME ZONE '23:12:12.1267-05:30'", "TIME", "23:12:12.1267", CastQuality.LOSSY), case("TIME (3) WITH TIME ZONE '23:12:12.1267'", "TIME", "23:12:12.127", CastQuality.LOSSLESS), case("TIME (3) WITH TIME ZONE '23:12:12.1267-05:30'", "TIME WITH TIME ZONE", "23:12:12.127-05:30", CastQuality.LOSSLESS), case("TIME (3) WITH TIME ZONE '23:12:12.1267+05:30'", "TIME (5)", "23:12:12.12700", CastQuality.LOSSY), - case("TIME (3) WITH TIME ZONE '23:12:12.1267-05:30'", "TIME (5) WITH TIME ZONE","23:12:12.12700-05:30", CastQuality.LOSSLESS), + case("TIME (3) WITH TIME ZONE '23:12:12.1267-05:30'", "TIME (5) WITH TIME ZONE", "23:12:12.12700-05:30", CastQuality.LOSSLESS), // CAST( AS ) case("`2007-02-23T12:14:33.079Z`", "TIME", "12:14:33.079", CastQuality.LOSSY), case("`2007-02-23T12:14:33.079-08:00`", "TIME", "12:14:33.079", CastQuality.LOSSY), @@ -1365,8 +1377,9 @@ abstract class CastTestBase : EvaluatorTestBase() { ).types(ExprValueType.STRING.sqlTextNames) ).flatten() + listOf(MISSING, NULL, BOOL, INT, FLOAT, DECIMAL, TIMESTAMP, CLOB, BLOB, LIST, SEXP, STRUCT, BAG) - .map { listOf(case("DATE '2007-10-10'", ErrorCode.EVALUATOR_INVALID_CAST)).types(it.sqlTextNames) - }.flatten() + .map { + listOf(case("DATE '2007-10-10'", ErrorCode.EVALUATOR_INVALID_CAST)).types(it.sqlTextNames) + }.flatten() private val typingModes: Map Unit> = mapOf( "LEGACY_TYPING_MODE" to { cob -> cob.legacyTypingMode() }, @@ -1374,7 +1387,7 @@ abstract class CastTestBase : EvaluatorTestBase() { ) val castBehaviors: Map Unit> = mapOf( - "LEGACY_CAST" to { cob -> cob.legacyCastBehavior() } , + "LEGACY_CAST" to { cob -> cob.legacyCastBehavior() }, "HONOR_PARAM_CAST" to { cob -> cob.honorTypedOpParameters() } ) @@ -1394,29 +1407,33 @@ abstract class CastTestBase : EvaluatorTestBase() { } } - private val castPermissiveConfiguredTestCases = (legacyCastTestCases.toPermissive().map { case -> - ConfiguredCastCase(case, "LEGACY_CAST, PERMISSIVE_TYPING_MODE") { - legacyCastBehavior() - permissiveTypingMode() - } - } + honorParamCastTestCases.toPermissive().map { case -> - ConfiguredCastCase(case, "HONOR_PARAM_CAST, PERMISSIVE_TYPING_MODE") { - honorTypedOpParameters() - permissiveTypingMode() + private val castPermissiveConfiguredTestCases = ( + legacyCastTestCases.toPermissive().map { case -> + ConfiguredCastCase(case, "LEGACY_CAST, PERMISSIVE_TYPING_MODE") { + legacyCastBehavior() + permissiveTypingMode() + } + } + honorParamCastTestCases.toPermissive().map { case -> + ConfiguredCastCase(case, 
"HONOR_PARAM_CAST, PERMISSIVE_TYPING_MODE") { + honorTypedOpParameters() + permissiveTypingMode() + } } - }) + ) - private val castLegacyConfiguredTestCases = (legacyCastTestCases.map { case -> - ConfiguredCastCase(case, "LEGACY_CAST, LEGACY_ERROR_MODE") { - legacyCastBehavior() - legacyTypingMode() - } - } + honorParamCastTestCases.map { case -> - ConfiguredCastCase(case, "HONOR_PARAM_CAST, LEGACY_ERROR_MODE") { - honorTypedOpParameters() - legacyTypingMode() + private val castLegacyConfiguredTestCases = ( + legacyCastTestCases.map { case -> + ConfiguredCastCase(case, "LEGACY_CAST, LEGACY_ERROR_MODE") { + legacyCastBehavior() + legacyTypingMode() + } + } + honorParamCastTestCases.map { case -> + ConfiguredCastCase(case, "HONOR_PARAM_CAST, LEGACY_ERROR_MODE") { + honorTypedOpParameters() + legacyTypingMode() + } } - }) + ) private val castDefaultTimezoneOffsetConfiguration = // Configuring default timezone offset through CompileOptions @@ -1465,7 +1482,7 @@ abstract class CastTestBase : EvaluatorTestBase() { case.copy( castCase = newCastCase ) - }.distinctBy {case -> + }.distinctBy { case -> // dedupe by source and compilation option function Pair(case.castCase.source, case.compileOptionBlock) } @@ -1491,37 +1508,41 @@ abstract class CastTestBase : EvaluatorTestBase() { ) } - private val canCastConfiguredTestCases = (legacyCastTestCases.flatMap { case -> - typingModes.map { (typingModeName, typingModeConfig) -> - ConfiguredCastCase(case.toCanCast(), "LEGACY_CAST, $typingModeName") { - legacyCastBehavior() - typingModeConfig(this) + private val canCastConfiguredTestCases = ( + legacyCastTestCases.flatMap { case -> + typingModes.map { (typingModeName, typingModeConfig) -> + ConfiguredCastCase(case.toCanCast(), "LEGACY_CAST, $typingModeName") { + legacyCastBehavior() + typingModeConfig(this) + } } - } - } + honorParamCastTestCases.flatMap { case -> - typingModes.map { (typingModeName, typingModeConfig) -> - ConfiguredCastCase(case.toCanCast(), "HONOR_PARAM_CAST, $typingModeName") { - honorTypedOpParameters() - typingModeConfig(this) + } + honorParamCastTestCases.flatMap { case -> + typingModes.map { (typingModeName, typingModeConfig) -> + ConfiguredCastCase(case.toCanCast(), "HONOR_PARAM_CAST, $typingModeName") { + honorTypedOpParameters() + typingModeConfig(this) + } } } - }) + ) - private val canLosslessCastConfiguredTestCases = (legacyCastTestCases.flatMap { case -> - typingModes.map { (typingModeName, typingModeConfig) -> - ConfiguredCastCase(case.toCanLosslessCast(), "LEGACY_CAST, $typingModeName") { - legacyCastBehavior() - typingModeConfig(this) + private val canLosslessCastConfiguredTestCases = ( + legacyCastTestCases.flatMap { case -> + typingModes.map { (typingModeName, typingModeConfig) -> + ConfiguredCastCase(case.toCanLosslessCast(), "LEGACY_CAST, $typingModeName") { + legacyCastBehavior() + typingModeConfig(this) + } } - } - } + honorParamCastTestCases.flatMap { case -> - typingModes.map { (typingModeName, typingModeConfig) -> - ConfiguredCastCase(case.toCanLosslessCast(), "HONOR_PARAM_CAST, $typingModeName") { - honorTypedOpParameters() - typingModeConfig(this) + } + honorParamCastTestCases.flatMap { case -> + typingModes.map { (typingModeName, typingModeConfig) -> + ConfiguredCastCase(case.toCanLosslessCast(), "HONOR_PARAM_CAST, $typingModeName") { + honorTypedOpParameters() + typingModeConfig(this) + } } } - }) + ) internal val allConfiguredTestCases = castConfiguredTestCases + @@ -1574,7 +1595,7 @@ abstract class CastTestBase : EvaluatorTestBase() { internal val 
allConfiguredDateTimeTestCases = configuredDateTimeTestCases + - canCastConfiguredDateTimeTestCases + - canLosslessCastConfiguredDateTimeTestCases + canCastConfiguredDateTimeTestCases + + canLosslessCastConfiguredDateTimeTestCases } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomAnyOfTypeOperationTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomAnyOfTypeOperationTests.kt index 757b82c8d1..d44efddbef 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomAnyOfTypeOperationTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomAnyOfTypeOperationTests.kt @@ -12,7 +12,6 @@ import org.partiql.lang.types.NumberConstraint import org.partiql.lang.types.SexpType import org.partiql.lang.types.StaticType import org.partiql.lang.types.StringType -import org.partiql.lang.types.StringType.* import org.partiql.lang.types.StructType import org.partiql.lang.types.TypedOpParameter import org.partiql.lang.util.ArgumentsProviderBase @@ -33,10 +32,11 @@ import org.partiql.lang.util.permissiveTypingMode * - `CAN_LOSSLESS_CAST( AS ES_ANY)` * - ` IS ES_ANY` */ -class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { +class EvaluatingCompilerCustomAnyOfTypeOperationTests : CastTestBase() { companion object { val customTypes = listOf( - CustomType("ES_ANY", esAny)) + CustomType("ES_ANY", esAny) + ) // Cases that pass the input to the output directly (for IS testing) private val esAnyCastIdentityCases = listOf( @@ -82,9 +82,11 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { case("[1, `{{\"woof\"}}`, 9999.0]", ErrorCode.EVALUATOR_CAST_FAILED), // sexp // This round trips from , , )>)> to , , )>)> - case("`(a b [2099-01-21T12:34:56Z])`", + case( + "`(a b [2099-01-21T12:34:56Z])`", "[\"a\", \"b\", [\"2099-01-21T12:34:56Z\"]]", - FixSemantics(CastQuality.LOSSLESS)), + FixSemantics(CastQuality.LOSSLESS) + ), case("`(a b [2099-01-21T12:34:56Z, {{Ymxhcmc=}}])`", ErrorCode.EVALUATOR_CAST_FAILED), // bag case("<<99, 20000, MISSING>>", "[99, 20000, null]", FixSemantics(CastQuality.LOSSY)) { @@ -93,32 +95,38 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { }, case("<<99, 20000, MISSING, `{{}}`>>", ErrorCode.EVALUATOR_CAST_FAILED), // struct - case("`{a: 1000, b: 1312000.1e0, c: 9999000.0, d: null}`", + case( + "`{a: 1000, b: 1312000.1e0, c: 9999000.0, d: null}`", "{a: 1000, b: 1312000, c: 9999000, d: null}", - CastQuality.LOSSY), - case("`{a: 1000, b: 1312000.1e0, c: 9999000.0, d: null, e:[{f:({{}})}]}`", - ErrorCode.EVALUATOR_CAST_FAILED) + CastQuality.LOSSY + ), + case( + "`{a: 1000, b: 1312000.1e0, c: 9999000.0, d: null, e:[{f:({{}})}]}`", + ErrorCode.EVALUATOR_CAST_FAILED + ) ).types(listOf("ES_ANY")) ).flatten() private val esAnyCastCases = esAnyCastIdentityCases + esAnyCastConvertOrFailCases // TODO consider refactoring into CastTestBase (with parameter) - fun List.toConfiguredCases(): List = (flatMap { case -> - castBehaviors.map { (castBehaviorName, castBehaviorConfig) -> - ConfiguredCastCase(case, "$castBehaviorName, LEGACY_TYPING_MODE") { - castBehaviorConfig(this) - legacyTypingMode() + fun List.toConfiguredCases(): List = ( + flatMap { case -> + castBehaviors.map { (castBehaviorName, castBehaviorConfig) -> + ConfiguredCastCase(case, "$castBehaviorName, LEGACY_TYPING_MODE") { + castBehaviorConfig(this) + legacyTypingMode() + } } - } - } + toPermissive().flatMap { case -> - castBehaviors.map { (castBehaviorName, castBehaviorConfig) -> - 
ConfiguredCastCase(case, "$castBehaviorName, PERMISSIVE_TYPING_MODE") { - castBehaviorConfig(this) - permissiveTypingMode() + } + toPermissive().flatMap { case -> + castBehaviors.map { (castBehaviorName, castBehaviorConfig) -> + ConfiguredCastCase(case, "$castBehaviorName, PERMISSIVE_TYPING_MODE") { + castBehaviorConfig(this) + permissiveTypingMode() + } } } - }).map { + ).map { it.copy( configurePipeline = { customDataTypes(customTypes) @@ -126,17 +134,19 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { ) } - fun List.toConfiguredHonorParamMode(): List = (map { case -> - ConfiguredCastCase(case, "HONOR_PARAMS, LEGACY_TYPING_MODE") { - honorTypedOpParameters() - legacyTypingMode() - } - } + toPermissive().map { case -> - ConfiguredCastCase(case, "HONOR_PARAMS, PERMISSIVE_TYPING_MODE") { - honorTypedOpParameters() - permissiveTypingMode() + fun List.toConfiguredHonorParamMode(): List = ( + map { case -> + ConfiguredCastCase(case, "HONOR_PARAMS, LEGACY_TYPING_MODE") { + honorTypedOpParameters() + legacyTypingMode() + } + } + toPermissive().map { case -> + ConfiguredCastCase(case, "HONOR_PARAMS, PERMISSIVE_TYPING_MODE") { + honorTypedOpParameters() + permissiveTypingMode() + } } - }).map { + ).map { it.copy( configurePipeline = { customDataTypes(customTypes) @@ -197,21 +207,23 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { fun esAnyIs(configuredCastCase: CastTestBase.ConfiguredCastCase) = configuredCastCase.assertCase() class EsAnyIsConfiguredCastCases : ArgumentsProviderBase() { override fun getParameters(): List { - val esAnyIsBaseCases = (esAnyCastIdentityCases.map { case -> - case.copy( - funcName = "IS", - expected = "true", - expectedErrorCode = null, - additionalAssertBlock = { } - ) - } + esAnyCastConvertOrFailCases.map { case -> - case.copy( - funcName = "IS", - expected = "false", - expectedErrorCode = null, - additionalAssertBlock = { } + val esAnyIsBaseCases = ( + esAnyCastIdentityCases.map { case -> + case.copy( + funcName = "IS", + expected = "true", + expectedErrorCode = null, + additionalAssertBlock = { } + ) + } + esAnyCastConvertOrFailCases.map { case -> + case.copy( + funcName = "IS", + expected = "false", + expectedErrorCode = null, + additionalAssertBlock = { } + ) + } ) - }) return esAnyIsBaseCases.toConfiguredCases() + // Take the bad union of type cases and rewrite them for `IS`. 
@@ -227,12 +239,14 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { additionalAssertBlock = { } ), configurePipeline = { - customDataTypes(listOf( - CustomType( - "ES_ANY", - typedOpParameter = badType + customDataTypes( + listOf( + CustomType( + "ES_ANY", + typedOpParameter = badType + ) ) - )) + ) }, description = "${configuredCase.description} $badType" ) @@ -250,7 +264,7 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { // duplicate types anyOfType( StaticType.STRING, - StringType(StringLengthConstraint.Constrained(NumberConstraint.UpTo(500))) + StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(500))) ), anyOfType( StaticType.INT, @@ -275,12 +289,14 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { additionalAssertBlock = { } ), configurePipeline = { - customDataTypes(listOf( - CustomType( - "ES_ANY", - typedOpParameter = badType + customDataTypes( + listOf( + CustomType( + "ES_ANY", + typedOpParameter = badType + ) ) - )) + ) }, description = "${case.description} $badType" ).let { @@ -301,4 +317,4 @@ class EvaluatingCompilerCustomAnyOfTypeOperationTests: CastTestBase() { } } } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt b/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt index aa9f34ba70..85a6e9dc42 100644 --- a/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt @@ -1,50 +1,50 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* -import junitparams.* -import junitparams.naming.* -import org.junit.* +import com.amazon.ion.Timestamp +import junitparams.JUnitParamsRunner +import junitparams.Parameters +import junitparams.naming.TestCaseName import org.junit.Test -import org.junit.runner.* -import java.lang.reflect.* -import java.time.format.* -import java.time.temporal.* +import org.junit.runner.RunWith +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.eval.EvaluationException +import java.lang.reflect.Type +import java.time.format.DateTimeParseException import kotlin.test.* @RunWith(JUnitParamsRunner::class) class TimestampParserTest { - data class ParseTimestampTestCase( + data class ParseTimestampTestCase( val pattern: String, val timestamp: String, - val expectedResult: Timestamp) - - @Test - @Parameters - @TestCaseName - fun parseTimestampTest(testCase: ParseTimestampTestCase) { - val result = TimestampParser.parseTimestamp(testCase.timestamp, testCase.pattern) - assertEquals(testCase.expectedResult, result) - } - - //Note: for timestamp fields that may be 1 or 2 digits (i.e. hour, minute, day, month) a single - //instance of the specifier parses zero padded values without difficulty. HOWEVER... - //Multiple repeated values REQUIRE zero padding. For example, a pattern of "y MM" will parse "2007 06" - //without difficulty but not "2007 6". However, a pattern of "y M" will parse either. + val expectedResult: Timestamp + ) + + @Test + @Parameters + @TestCaseName + fun parseTimestampTest(testCase: ParseTimestampTestCase) { + val result = TimestampParser.parseTimestamp(testCase.timestamp, testCase.pattern) + assertEquals(testCase.expectedResult, result) + } + + // Note: for timestamp fields that may be 1 or 2 digits (i.e. 
hour, minute, day, month) a single + // instance of the specifier parses zero padded values without difficulty. HOWEVER... + // Multiple repeated values REQUIRE zero padding. For example, a pattern of "y MM" will parse "2007 06" + // without difficulty but not "2007 6". However, a pattern of "y M" will parse either. fun parametersForParseTimestampTest(): List = listOf( - //Year - //Single "y" symbol parses arbitary year. + // Year + // Single "y" symbol parses arbitary year. ParseTimestampTestCase("y", "7", Timestamp.valueOf("0007T")), ParseTimestampTestCase("y", "0007", Timestamp.valueOf("0007T")), ParseTimestampTestCase("y", "2007", Timestamp.valueOf("2007T")), - //Zero padding is required when three or four "y" symbols are used. + // Zero padding is required when three or four "y" symbols are used. ParseTimestampTestCase("yyy", "0007", Timestamp.valueOf("0007T")), ParseTimestampTestCase("yyyy", "0007", Timestamp.valueOf("0007T")), - //Two "y" symbols parses 2 digit year + // Two "y" symbols parses 2 digit year ParseTimestampTestCase("yy", "00", Timestamp.valueOf("2000T")), ParseTimestampTestCase("yy", "01", Timestamp.valueOf("2001T")), ParseTimestampTestCase("yy", "69", Timestamp.valueOf("2069T")), @@ -52,68 +52,68 @@ class TimestampParserTest { ParseTimestampTestCase("yy", "71", Timestamp.valueOf("1971T")), ParseTimestampTestCase("yy", "99", Timestamp.valueOf("1999T")), - //Month - //Zero padding is optional with single "M" symbol + // Month + // Zero padding is optional with single "M" symbol ParseTimestampTestCase("y M", "2007 6", Timestamp.valueOf("2007-06T")), ParseTimestampTestCase("y M", "2007 6", Timestamp.valueOf("2007-06T")), ParseTimestampTestCase("y M", "2007 06", Timestamp.valueOf("2007-06T")), - //Two "M" symbols requires zero padding + // Two "M" symbols requires zero padding ParseTimestampTestCase("y MM", "2007 06", Timestamp.valueOf("2007-06T")), - //Three "M" symbols require three letter month abbreviation + // Three "M" symbols require three letter month abbreviation ParseTimestampTestCase("y MMM", "2007 Jun", Timestamp.valueOf("2007-06T")), ParseTimestampTestCase("y MMM", "2007 jun", Timestamp.valueOf("2007-06T")), - //Four "M" symbols requires full month name + // Four "M" symbols requires full month name ParseTimestampTestCase("y MMMM", "2007 june", Timestamp.valueOf("2007-06T")), - //Day - //Zero padding is optional with a single "d" symbol + // Day + // Zero padding is optional with a single "d" symbol ParseTimestampTestCase("y M d", "2007 6 5", Timestamp.valueOf("2007-06-05T")), ParseTimestampTestCase("y M d", "2007 6 05", Timestamp.valueOf("2007-06-05T")), - //Two "d" symbols require zero padding + // Two "d" symbols require zero padding ParseTimestampTestCase("y M dd", "2007 6 05", Timestamp.valueOf("2007-06-05T")), - //Hour + // Hour ParseTimestampTestCase("y M d H", "2007 6 5 9", Timestamp.valueOf("2007-06-05T09:00-00:00")), ParseTimestampTestCase("y M d h a", "2007 6 5 9 am", Timestamp.valueOf("2007-06-05T09:00-00:00")), ParseTimestampTestCase("y M d h a", "2007 6 5 9 pm", Timestamp.valueOf("2007-06-05T21:00-00:00")), ParseTimestampTestCase("y M d H", "2007 6 5 09", Timestamp.valueOf("2007-06-05T09:00-00:00")), ParseTimestampTestCase("y M d HH", "2007 6 5 09", Timestamp.valueOf("2007-06-05T09:00-00:00")), - //Minute (same rules with 1 "m" vs "mm") + // Minute (same rules with 1 "m" vs "mm") ParseTimestampTestCase("y M d H m", "2007 6 5 9 8", Timestamp.valueOf("2007-06-05T09:08-00:00")), ParseTimestampTestCase("y M d H m", "2007 6 5 9 08", 
Timestamp.valueOf("2007-06-05T09:08-00:00")), ParseTimestampTestCase("y M d H mm", "2007 6 5 9 08", Timestamp.valueOf("2007-06-05T09:08-00:00")), - //Second + // Second ParseTimestampTestCase("y M d H m s", "2007 6 5 9 8 6", Timestamp.valueOf("2007-06-05T09:08:06-00:00")), ParseTimestampTestCase("y M d H m s", "2007 6 5 9 8 06", Timestamp.valueOf("2007-06-05T09:08:06-00:00")), ParseTimestampTestCase("y M d H m ss", "2007 6 5 9 8 06", Timestamp.valueOf("2007-06-05T09:08:06-00:00")), - //12-hour mode + // 12-hour mode ParseTimestampTestCase("y M d h m s a", "2007 6 5 1 2 6 PM", Timestamp.valueOf("2007-06-05T13:02:06-00:00")), ParseTimestampTestCase("y M d h m s a", "2007 6 5 1 2 6 AM", Timestamp.valueOf("2007-06-05T01:02:06-00:00")), ParseTimestampTestCase("y M d h m s a", "2007 6 5 1 2 6 pm", Timestamp.valueOf("2007-06-05T13:02:06-00:00")), ParseTimestampTestCase("y M d h m s a", "2007 6 5 1 2 6 am", Timestamp.valueOf("2007-06-05T01:02:06-00:00")), - //Second fraction, where precision of the fraction is specified by the number of S symbols. + // Second fraction, where precision of the fraction is specified by the number of S symbols. // S -> 1/10th of a second // SS -> 1/100th of a second // SSS -> 1/1000th of a second (millisecond) // ... // up to 1 nanosecond (9 'S' symbols) - //Zero padding is required in this here because the value is intended to be on the right of a decimal point. + // Zero padding is required in this here because the value is intended to be on the right of a decimal point. ParseTimestampTestCase("y M d H m s S", "2007 6 5 9 8 6 2", Timestamp.valueOf("2007-06-05T09:08:06.2-00:00")), ParseTimestampTestCase("y M d H m s SS", "2007 6 5 9 8 6 25", Timestamp.valueOf("2007-06-05T09:08:06.25-00:00")), ParseTimestampTestCase("y M d H m s SSS", "2007 6 5 9 8 6 256", Timestamp.valueOf("2007-06-05T09:08:06.256-00:00")), ParseTimestampTestCase("y M d H m s SSSSSSSSS", "2007 6 5 9 8 6 123456789", Timestamp.valueOf("2007-06-05T09:08:06.123456789-00:00")), - //Nanosecond - //Zero padding is optional + // Nanosecond + // Zero padding is optional ParseTimestampTestCase("y M d H m s n", "2007 6 5 9 8 6 100", Timestamp.valueOf("2007-06-05T09:08:06.0000001-00:00")), ParseTimestampTestCase("y M d H m s n", "2007 6 5 9 8 6 00100", Timestamp.valueOf("2007-06-05T09:08:06.0000001-00:00")), ParseTimestampTestCase("y M d H m s n", "2007 6 5 9 8 6 123456789", Timestamp.valueOf("2007-06-05T09:08:06.123456789-00:00")), - //Ion timestamp precision variants + // Ion timestamp precision variants ParseTimestampTestCase("y'T'", "1969T", Timestamp.valueOf("1969T")), ParseTimestampTestCase("y-MM'T'", "1969-07T", Timestamp.valueOf("1969-07T")), ParseTimestampTestCase("y-MM-dd'T'", "1969-07-20T", Timestamp.valueOf("1969-07-20T")), @@ -129,10 +129,10 @@ class TimestampParserTest { ParseTimestampTestCase("y-MM-dd'T'H:m:ss.SSSSSSSS", "1969-07-20T20:18:00.12345678", Timestamp.valueOf("1969-07-20T20:18:00.12345678-00:00")), ParseTimestampTestCase("y-MM-dd'T'H:m:ss.SSSSSSSSS", "1969-07-20T20:18:00.123456789", Timestamp.valueOf("1969-07-20T20:18:00.123456789-00:00")), - //Ion timestamp with explicit unknown offset. The "-00:00" at the end of the timestamp string signifies - //an unknown offset. ("+00:00" signifies UTC/GMT.) - //Note: these are tests removed because there's no way I can determine to reliably handle negative zero offset - //indicating unknown offset, even with an ugly hack. + // Ion timestamp with explicit unknown offset. 
The "-00:00" at the end of the timestamp string signifies + // an unknown offset. ("+00:00" signifies UTC/GMT.) + // Note: these are tests removed because there's no way I can determine to reliably handle negative zero offset + // indicating unknown offset, even with an ugly hack. // ParseTimestampTestCase("y-MM-dd'T'H:m:ssXXXXX", "1969-07-20T20:18:00-00:00", Timestamp.valueOf("1969-07-20T20:18:00-00:00")), // ParseTimestampTestCase("y M d H m XXXXX", "1969 07 20 20 01 -00:00", Timestamp.valueOf("1969-07-20T20:01-00:00")), // ParseTimestampTestCase("y M d H m XXXXX", "1969 07 20 20 01 -0", Timestamp.valueOf("1969-07-20T20:01-00:00")), @@ -140,16 +140,16 @@ class TimestampParserTest { // ParseTimestampTestCase("y M d H m XXXXX", "1969 07 20 20 01 -0000", Timestamp.valueOf("1969-07-20T20:01-00:00")), // ParseTimestampTestCase("y M d H m XXXXX", "1969 07 20 20 01 -00:00", Timestamp.valueOf("1969-07-20T20:01-00:00")), - //Known offsets (caveat: DateTimeFormatter throws exception if offset is longer than +/- 18h while Ion timestamp is +/- 24h) - //Note that DateTimeFormatter is unfortunately unable to recognize a negative zero offset as an unknown offset like Ion does. + // Known offsets (caveat: DateTimeFormatter throws exception if offset is longer than +/- 18h while Ion timestamp is +/- 24h) + // Note that DateTimeFormatter is unfortunately unable to recognize a negative zero offset as an unknown offset like Ion does. - //Capital X allows the use of "Z" to represent zero offset from GMT. + // Capital X allows the use of "Z" to represent zero offset from GMT. ParseTimestampTestCase("y M d H m X", "1969 07 20 20 01 Z", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m XX", "1969 07 20 20 01 Z", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m XXX", "1969 07 20 20 01 Z", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m XXXX", "1969 07 20 20 01 Z", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m XXXXX", "1969 07 20 20 01 Z", Timestamp.valueOf("1969-07-20T20:01Z")), - + ParseTimestampTestCase("y M d H m X", "1969 07 20 20 01 Z", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m X", "1969 07 20 20 01 +0000", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m X", "1969 07 20 20 01 -0000", Timestamp.valueOf("1969-07-20T20:01Z")), @@ -173,9 +173,9 @@ class TimestampParserTest { ParseTimestampTestCase("y M d H m xxx", "1969 07 20 20 01 +00:00", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m xxx", "1969 07 20 20 01 -00:00", Timestamp.valueOf("1969-07-20T20:01Z")), - //This might be a bug in Java's DateTimeFormatter, but lowercase 'x' cannot parse "+0000" like uppercase "X" can - //even though by all appearances, it should. - //ParseTimestampTestCase("y M d H m x", "1969 07 20 20 01 +0000", Timestamp.valueOf("1969-07-20T20:01Z")), + // This might be a bug in Java's DateTimeFormatter, but lowercase 'x' cannot parse "+0000" like uppercase "X" can + // even though by all appearances, it should. 
+ // ParseTimestampTestCase("y M d H m x", "1969 07 20 20 01 +0000", Timestamp.valueOf("1969-07-20T20:01Z")), ParseTimestampTestCase("y M d H m x", "1969 07 20 20 01 +0100", Timestamp.valueOf("1969-07-20T20:01+01:00")), ParseTimestampTestCase("y M d H m x", "1969 07 20 20 01 +02", Timestamp.valueOf("1969-07-20T20:01+02:00")), ParseTimestampTestCase("y M d H m x", "1969 07 20 20 01 -02", Timestamp.valueOf("1969-07-20T20:01-02:00")), @@ -189,15 +189,15 @@ class TimestampParserTest { ParseTimestampTestCase("y M d H m xxxxx", "1969 07 20 20 01 -18:00", Timestamp.valueOf("1969-07-20T20:01-18:00")), ParseTimestampTestCase("y M d H m xxxxx", "1969 07 20 20 01 +18:00", Timestamp.valueOf("1969-07-20T20:01+18:00")), - //Date format with whitespace surrounding the string + // Date format with whitespace surrounding the string ParseTimestampTestCase(" y M d ", " 2007 6 5 ", Timestamp.valueOf("2007-06-05T")), ParseTimestampTestCase("'\t'y M d'\t'", "\t2007 6 5\t", Timestamp.valueOf("2007-06-05T")), - //Crazy delimiters + // Crazy delimiters ParseTimestampTestCase("'Some'y'crazy'M'delimiter'd'here'", "Some2007crazy6delimiter5here", Timestamp.valueOf("2007-06-05T")), ParseTimestampTestCase("'😸'y'😸'M'😸'd'😸'", "😸2007😸6😸5😸", Timestamp.valueOf("2007-06-05T")), - //No delimiters at all + // No delimiters at all ParseTimestampTestCase("yyyyMMddHHmmss", "20070605040302", Timestamp.valueOf("2007-06-05T04:03:02-00:00")) ) @@ -208,9 +208,9 @@ class TimestampParserTest { try { val ts = TimestampParser.parseTimestamp(testCase.timestampString, testCase.formatPattern) fail("The unexpectedly parsed timestamp was: " + ts) - } catch(ex: EvaluationException) { + } catch (ex: EvaluationException) { assertEquals(testCase.expectedErrorCode, ex.errorCode) - if(testCase.expectedCauseType == null) { + if (testCase.expectedCauseType == null) { assertNull(ex.cause) } else { assertNotNull(ex.cause) @@ -225,190 +225,216 @@ class TimestampParserTest { // the test case failed for the expected reason. The error messages are not meant to be part of the // contract we expose to the client. - if(testCase.expectedCauseMessage != null) { + if (testCase.expectedCauseMessage != null) { assertEquals(testCase.expectedCauseMessage, ex.cause!!.message) } } } } - data class ParseFailureTestCase(val formatPattern: String, - val timestampString: String, - val expectedCauseType: Type? = null, - val expectedCauseMessage: String? = null, - val expectedErrorCode: ErrorCode = ErrorCode.EVALUATOR_CUSTOM_TIMESTAMP_PARSE_FAILURE) - + data class ParseFailureTestCase( + val formatPattern: String, + val timestampString: String, + val expectedCauseType: Type? = null, + val expectedCauseMessage: String? 
= null, + val expectedErrorCode: ErrorCode = ErrorCode.EVALUATOR_CUSTOM_TIMESTAMP_PARSE_FAILURE + ) fun parametersForParseTimestampExceptionTest() = listOf( - //Year outside of range (year is 0) + // Year outside of range (year is 0) ParseFailureTestCase( "yyyy-MM-dd", "0000-01-01", DateTimeParseException::class.java, - "Text '0000-01-01' could not be parsed: Invalid value for YearOfEra (valid values 1 - 999999999/1000000000): 0"), + "Text '0000-01-01' could not be parsed: Invalid value for YearOfEra (valid values 1 - 999999999/1000000000): 0" + ), - //Month outside of range + // Month outside of range ParseFailureTestCase( "yyyy-MM-dd", "2017-00-01", DateTimeParseException::class.java, - "Text '2017-00-01' could not be parsed: Invalid value for MonthOfYear (valid values 1 - 12): 0"), + "Text '2017-00-01' could not be parsed: Invalid value for MonthOfYear (valid values 1 - 12): 0" + ), ParseFailureTestCase( "yyyy-MM-dd", "2017-13-01", DateTimeParseException::class.java, - "Text '2017-13-01' could not be parsed: Invalid value for MonthOfYear (valid values 1 - 12): 13"), + "Text '2017-13-01' could not be parsed: Invalid value for MonthOfYear (valid values 1 - 12): 13" + ), - //Day outside of range + // Day outside of range ParseFailureTestCase( "yyyy-MM-dd", "2017-01-00", DateTimeParseException::class.java, - "Text '2017-01-00' could not be parsed: Invalid value for DayOfMonth (valid values 1 - 28/31): 0"), + "Text '2017-01-00' could not be parsed: Invalid value for DayOfMonth (valid values 1 - 28/31): 0" + ), ParseFailureTestCase( "yyyy-MM-dd", "2017-01-32", DateTimeParseException::class.java, - "Text '2017-01-32' could not be parsed: Invalid value for DayOfMonth (valid values 1 - 28/31): 32"), + "Text '2017-01-32' could not be parsed: Invalid value for DayOfMonth (valid values 1 - 28/31): 32" + ), - //Hour outside of range (AM/PM) - //ParseFailureTestCase("2017-01-01 00:01 PM", "yyyy-MM-dd hh:mm a", ""), //In 12 hour mode, 0 is considered 12... + // Hour outside of range (AM/PM) + // ParseFailureTestCase("2017-01-01 00:01 PM", "yyyy-MM-dd hh:mm a", ""), //In 12 hour mode, 0 is considered 12... 
ParseFailureTestCase( "yyyy-MM-dd hh:mm a", "2017-01-01 13:01 PM", DateTimeParseException::class.java, - "Text '2017-01-01 13:01 PM' could not be parsed: Invalid value for ClockHourOfAmPm (valid values 1 - 12): 13"), + "Text '2017-01-01 13:01 PM' could not be parsed: Invalid value for ClockHourOfAmPm (valid values 1 - 12): 13" + ), - //Hour outside of range (24hr) + // Hour outside of range (24hr) ParseFailureTestCase( "yyyy-MM-dd HH:mm", "2017-01-01 24:01", DateTimeParseException::class.java, - "Text '2017-01-01 24:01' could not be parsed: Invalid value for HourOfDay (valid values 0 - 23): 24"), + "Text '2017-01-01 24:01' could not be parsed: Invalid value for HourOfDay (valid values 0 - 23): 24" + ), - //Minute outside of range + // Minute outside of range ParseFailureTestCase( "yyyy-MM-dd HH:mm", "2017-01-01 01:60", DateTimeParseException::class.java, - "Text '2017-01-01 01:60' could not be parsed: Invalid value for MinuteOfHour (valid values 0 - 59): 60"), + "Text '2017-01-01 01:60' could not be parsed: Invalid value for MinuteOfHour (valid values 0 - 59): 60" + ), - //Second outside of range + // Second outside of range ParseFailureTestCase( "yyyy-MM-dd HH:mm:ss", "2017-01-01 01:01:60", DateTimeParseException::class.java, - "Text '2017-01-01 01:01:60' could not be parsed: Invalid value for SecondOfMinute (valid values 0 - 59): 60"), + "Text '2017-01-01 01:01:60' could not be parsed: Invalid value for SecondOfMinute (valid values 0 - 59): 60" + ), - //Whitespace surrounding custom timestamp + // Whitespace surrounding custom timestamp ParseFailureTestCase( "yyyy-MM-dd", " 2017-01-01", DateTimeParseException::class.java, - "Text ' 2017-01-01' could not be parsed at index 0"), + "Text ' 2017-01-01' could not be parsed at index 0" + ), ParseFailureTestCase( "yyyy-MM-dd", "2017-01-01 ", DateTimeParseException::class.java, - "Text '2017-01-01 ' could not be parsed, unparsed text found at index 10"), + "Text '2017-01-01 ' could not be parsed, unparsed text found at index 10" + ), ParseFailureTestCase( "yyyy-MM-dd", " 2017-01-01 ", DateTimeParseException::class.java, - "Text ' 2017-01-01 ' could not be parsed at index 0"), + "Text ' 2017-01-01 ' could not be parsed at index 0" + ), ParseFailureTestCase( "yyyy-MM-dd", "2017-01-01 ", DateTimeParseException::class.java, - "Text '2017-01-01 ' could not be parsed, unparsed text found at index 10"), + "Text '2017-01-01 ' could not be parsed, unparsed text found at index 10" + ), - //Required zero padding not present (Zero padding required because 2 or more consecutive format symbols) + // Required zero padding not present (Zero padding required because 2 or more consecutive format symbols) ParseFailureTestCase( - "yyy M d H m s", //a 3 digit year doesn't seem to make sense but the DateTimeFormatter allows it. + "yyy M d H m s", // a 3 digit year doesn't seem to make sense but the DateTimeFormatter allows it. 
"7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 0"), + "Text '7 6 5 9 8 6' could not be parsed at index 0" + ), ParseFailureTestCase( "yyyy M d H m s", "7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 0"), + "Text '7 6 5 9 8 6' could not be parsed at index 0" + ), ParseFailureTestCase( "y MM d H m s", "7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 2"), + "Text '7 6 5 9 8 6' could not be parsed at index 2" + ), ParseFailureTestCase( "y M dd H m s", "7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 4"), + "Text '7 6 5 9 8 6' could not be parsed at index 4" + ), ParseFailureTestCase( "y M d HH m s", "7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 6"), + "Text '7 6 5 9 8 6' could not be parsed at index 6" + ), ParseFailureTestCase( "y M d H mm s", "7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 8"), + "Text '7 6 5 9 8 6' could not be parsed at index 8" + ), ParseFailureTestCase( "y M d H m ss", "7 6 5 9 8 6", DateTimeParseException::class.java, - "Text '7 6 5 9 8 6' could not be parsed at index 10"), + "Text '7 6 5 9 8 6' could not be parsed at index 10" + ), - //1 digit offset. Ideally this would not be a failure case but they appear to have left 1 digit offsets - //out of the JDK8 spec: https://bugs.openjdk.java.net/browse/JDK-8066806 + // 1 digit offset. Ideally this would not be a failure case but they appear to have left 1 digit offsets + // out of the JDK8 spec: https://bugs.openjdk.java.net/browse/JDK-8066806 ParseFailureTestCase( "y M d H m x", "1969 07 20 20 01 +2", DateTimeParseException::class.java, - "Text '1969 07 20 20 01 +2' could not be parsed at index 17"), + "Text '1969 07 20 20 01 +2' could not be parsed at index 17" + ), - //Offset exceeds allowable range - //Note: Java's DateTimeFormatter only allows +/- 18h but IonJava's Timestamp allows +/- 23:59. + // Offset exceeds allowable range + // Note: Java's DateTimeFormatter only allows +/- 18h but IonJava's Timestamp allows +/- 23:59. 
ParseFailureTestCase( "y M d H m x", "1969 07 20 20 01 +2400", - DateTimeParseException::class.java), - // Note: exception message differs in JDK versions later than 1.8 - //"Text '1969 07 20 20 01 +2400' could not be parsed: Zone offset not in valid range: -18:00 to +18:00"), + DateTimeParseException::class.java + ), + // Note: exception message differs in JDK versions later than 1.8 + // "Text '1969 07 20 20 01 +2400' could not be parsed: Zone offset not in valid range: -18:00 to +18:00"), ParseFailureTestCase( "yyyy M d H m x", "1969 07 20 20 01 -2400", - DateTimeParseException::class.java), - // Note: exception message differs in JDK versions later than 1.8 - //"Text '1969 07 20 20 01 -2400' could not be parsed: Zone offset not in valid range: -18:00 to +18:00"), + DateTimeParseException::class.java + ), + // Note: exception message differs in JDK versions later than 1.8 + // "Text '1969 07 20 20 01 -2400' could not be parsed: Zone offset not in valid range: -18:00 to +18:00"), - //Offset not ending on a minute boundary (error condition detected by TimestampParser) + // Offset not ending on a minute boundary (error condition detected by TimestampParser) ParseFailureTestCase( "yyyy M d H m xxxxx", "1969 07 20 20 01 +01:00:01", - expectedErrorCode = ErrorCode.EVALUATOR_PRECISION_LOSS_WHEN_PARSING_TIMESTAMP), + expectedErrorCode = ErrorCode.EVALUATOR_PRECISION_LOSS_WHEN_PARSING_TIMESTAMP + ), - //Three digit offset + // Three digit offset ParseFailureTestCase( "yyyy M d H m x", "1969 07 20 20 01 -240", - DateTimeParseException::class.java) - // Note: exception message differs in JDK versions later than 1.8 - // "Text '1969 07 20 20 01 -240' could not be parsed, unparsed text found at index 20") + DateTimeParseException::class.java ) + // Note: exception message differs in JDK versions later than 1.8 + // "Text '1969 07 20 20 01 -240' could not be parsed, unparsed text found at index 20") + ) @Test @Parameters @@ -416,7 +442,7 @@ class TimestampParserTest { try { TimestampParser.parseTimestamp("doesn't matter shouldn't get parsed anyway", testCase.pattern) fail("didn't throw") - } catch(ex: EvaluationException) { + } catch (ex: EvaluationException) { assertEquals(testCase.expectedErrorCode, ex.errorCode) assertNull(ex.cause) } @@ -436,5 +462,5 @@ class TimestampParserTest { InvalidFormatPatternTestCase("mmm", ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN_SYMBOL), InvalidFormatPatternTestCase("sss", ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN_SYMBOL) ) - //unterminated quote + // unterminated quote } diff --git a/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt b/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt index 35dab018af..783a86944e 100644 --- a/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt +++ b/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt @@ -1,6 +1,5 @@ package org.partiql.lang.eval.builtins - import com.amazon.ion.Timestamp import junitparams.JUnitParamsRunner import junitparams.Parameters @@ -22,7 +21,7 @@ class TimestampTemporalAccessorTests { fun createRng(): Random { val rng = Random() val seed = rng.nextLong() - System.out.println("Randomly generated seed is ${seed}. Use this to reproduce failures in dev environment.") + System.out.println("Randomly generated seed is $seed. 
Use this to reproduce failures in dev environment.") rng.setSeed(seed) return rng } @@ -31,12 +30,16 @@ class TimestampTemporalAccessorTests { @Parameters @TestCaseName("formatRandomTimesWithSymbol_{0}") fun formatRandomTimesWithAllDateFormatSymbolsTest(formatSymbol: String) { - System.out.println(String.format("Generating %,d random dates, formatting each of them with \"%s\" comparing the result...", - ITERATION_COUNT, formatSymbol)) + System.out.println( + String.format( + "Generating %,d random dates, formatting each of them with \"%s\" comparing the result...", + ITERATION_COUNT, formatSymbol + ) + ) val rng = createRng() - val formatter = DateTimeFormatter.ofPattern(formatSymbol); + val formatter = DateTimeFormatter.ofPattern(formatSymbol) (0..ITERATION_COUNT).toList().parallelStream().forEach { _ -> val timestamp = rng.nextTimestamp() @@ -49,11 +52,11 @@ class TimestampTemporalAccessorTests { assertEquals(formattedOffsetDateTime, formattedTimestamp) } } - fun parametersForFormatRandomTimesWithAllDateFormatSymbolsTest() : Set = TIMESTAMP_FORMAT_SYMBOLS + fun parametersForFormatRandomTimesWithAllDateFormatSymbolsTest(): Set = TIMESTAMP_FORMAT_SYMBOLS @Test fun timestampWithUnknownOffset() { - //Note: Ion spec allows representation of unknown offset with "0" + // Note: Ion spec allows representation of unknown offset with "0" val timestamp = Timestamp.forSecond(1969, 7, 20, 20, 18, 36, null) assertNull(timestamp.localOffset) @@ -84,23 +87,22 @@ class TimestampTemporalAccessorTests { * Java's DateTimeFormatter with something else later. */ fun parametersForHandleUnsupportedFormatSymbolsTest(): List = listOf( - UnsupportedSymbolTestCase("G", UnsupportedTemporalTypeException::class.java), //Era, e.g. "AD" - UnsupportedSymbolTestCase("u", UnsupportedTemporalTypeException::class.java), //Year of era, e.g. "1978"; "78", (this is always positive, even for BC values) - UnsupportedSymbolTestCase("Q", UnsupportedTemporalTypeException::class.java), //Quarter of year (1-4) - UnsupportedSymbolTestCase("q", UnsupportedTemporalTypeException::class.java), //Quarter of year e.g. "Q3" "3rd quarter", - UnsupportedSymbolTestCase("E", UnsupportedTemporalTypeException::class.java), //Day of week - UnsupportedSymbolTestCase("F", UnsupportedTemporalTypeException::class.java), //Week of month - UnsupportedSymbolTestCase("K", UnsupportedTemporalTypeException::class.java), //hour of am-pm (0-11) - UnsupportedSymbolTestCase("k", UnsupportedTemporalTypeException::class.java), //clock of am-pm (1-24) - UnsupportedSymbolTestCase("A", UnsupportedTemporalTypeException::class.java), //Millsecond of day (0-85,499,999) - UnsupportedSymbolTestCase("N", UnsupportedTemporalTypeException::class.java), //Nano of day (0-85,499,999,999,999) - UnsupportedSymbolTestCase("Y", UnsupportedTemporalTypeException::class.java), //Week based year - UnsupportedSymbolTestCase("w", UnsupportedTemporalTypeException::class.java), //Week of week based year - UnsupportedSymbolTestCase("W", UnsupportedTemporalTypeException::class.java), //Week of month - UnsupportedSymbolTestCase("e", UnsupportedTemporalTypeException::class.java), //Localized day of week (number) - UnsupportedSymbolTestCase("c", UnsupportedTemporalTypeException::class.java), //Localized day of week (week name, e.g. 
"Tue" or "Tuesday") - UnsupportedSymbolTestCase("VV", DateTimeException::class.java), //time zone id, e.g "America/Los_Angeles; Z; -08:30" - ion timestamp does not know timezone, only offset - UnsupportedSymbolTestCase("z", DateTimeException::class.java) //time zone name, e.g. "Pacific Standard Time" - ion timestamp does not know timezone, only offset + UnsupportedSymbolTestCase("G", UnsupportedTemporalTypeException::class.java), // Era, e.g. "AD" + UnsupportedSymbolTestCase("u", UnsupportedTemporalTypeException::class.java), // Year of era, e.g. "1978"; "78", (this is always positive, even for BC values) + UnsupportedSymbolTestCase("Q", UnsupportedTemporalTypeException::class.java), // Quarter of year (1-4) + UnsupportedSymbolTestCase("q", UnsupportedTemporalTypeException::class.java), // Quarter of year e.g. "Q3" "3rd quarter", + UnsupportedSymbolTestCase("E", UnsupportedTemporalTypeException::class.java), // Day of week + UnsupportedSymbolTestCase("F", UnsupportedTemporalTypeException::class.java), // Week of month + UnsupportedSymbolTestCase("K", UnsupportedTemporalTypeException::class.java), // hour of am-pm (0-11) + UnsupportedSymbolTestCase("k", UnsupportedTemporalTypeException::class.java), // clock of am-pm (1-24) + UnsupportedSymbolTestCase("A", UnsupportedTemporalTypeException::class.java), // Millsecond of day (0-85,499,999) + UnsupportedSymbolTestCase("N", UnsupportedTemporalTypeException::class.java), // Nano of day (0-85,499,999,999,999) + UnsupportedSymbolTestCase("Y", UnsupportedTemporalTypeException::class.java), // Week based year + UnsupportedSymbolTestCase("w", UnsupportedTemporalTypeException::class.java), // Week of week based year + UnsupportedSymbolTestCase("W", UnsupportedTemporalTypeException::class.java), // Week of month + UnsupportedSymbolTestCase("e", UnsupportedTemporalTypeException::class.java), // Localized day of week (number) + UnsupportedSymbolTestCase("c", UnsupportedTemporalTypeException::class.java), // Localized day of week (week name, e.g. "Tue" or "Tuesday") + UnsupportedSymbolTestCase("VV", DateTimeException::class.java), // time zone id, e.g "America/Los_Angeles; Z; -08:30" - ion timestamp does not know timezone, only offset + UnsupportedSymbolTestCase("z", DateTimeException::class.java) // time zone name, e.g. 
"Pacific Standard Time" - ion timestamp does not know timezone, only offset ) } - diff --git a/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParserTest.kt b/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParserTest.kt index b6673afff9..1ef8206d83 100644 --- a/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParserTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParserTest.kt @@ -1,11 +1,11 @@ package org.partiql.lang.eval.builtins.timestamp -import org.partiql.lang.util.* -import junitparams.* -import org.junit.* +import junitparams.JUnitParamsRunner +import junitparams.Parameters import org.junit.Test -import org.junit.runner.* -import kotlin.test.* +import org.junit.runner.RunWith +import org.partiql.lang.util.softAssert +import kotlin.test.assertEquals @RunWith(JUnitParamsRunner::class) internal class TimestampFormatPatternParserTest { @@ -35,7 +35,7 @@ internal class TimestampFormatPatternParserTest { "mm" to listOf(MinuteOfHourPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER)), "s" to listOf(SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.NUMBER)), - "ss" to listOf(SecondOfMinutePatternPatternSymbol( TimestampFieldFormat.ZERO_PADDED_NUMBER)), + "ss" to listOf(SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER)), "x" to listOf(OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR)), @@ -86,7 +86,8 @@ internal class TimestampFormatPatternParserTest { SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER), TextItem("."), FractionOfSecondPatternSymbol(3), - OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_COLON_MINUTE_OR_Z)), + OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_COLON_MINUTE_OR_Z) + ), "yyyyMMddHHmmssSSSXXXXX" to listOf( YearPatternSymbol(YearFormat.FOUR_DIGIT_ZERO_PADDED), @@ -96,7 +97,8 @@ internal class TimestampFormatPatternParserTest { MinuteOfHourPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER), SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER), FractionOfSecondPatternSymbol(3), - OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_COLON_MINUTE_OR_Z)) + OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_COLON_MINUTE_OR_Z) + ) ) @Test @@ -106,28 +108,27 @@ internal class TimestampFormatPatternParserTest { assertEquals(pair.second, formatPattern.formatItems) } - @Test fun mostPreciseField() { - //NOTE: we can't parameterize this unless we want to expose TimestampParser.FormatPatternPrecision as public. + // NOTE: we can't parameterize this unless we want to expose TimestampParser.FormatPatternPrecision as public. 
softAssert { - for((pattern, expectedResult, expectedHas2DigitYear) in parametersForExaminePatternTest) { + for ((pattern, expectedResult, expectedHas2DigitYear) in parametersForExaminePatternTest) { val result = FormatPattern.fromString(pattern) assertThat(result.leastSignificantField) - .withFailMessage("Pattern '${pattern}' was used, '${expectedResult}' was expected but result was '${result.leastSignificantField}'") + .withFailMessage("Pattern '$pattern' was used, '$expectedResult' was expected but result was '${result.leastSignificantField}'") .isEqualTo(expectedResult) assertThat(result.has2DigitYear) - .withFailMessage("has2DigitYear expected: ${expectedHas2DigitYear} but was ${result.has2DigitYear}, pattern was: '${pattern}'") + .withFailMessage("has2DigitYear expected: $expectedHas2DigitYear but was ${result.has2DigitYear}, pattern was: '$pattern'") .isEqualTo(expectedHas2DigitYear) } } } - private data class MostPreciseFieldTestCase( val pattern: String, val expectedResult: TimestampField, - val expectedHas2DigitYear: Boolean = false) + val expectedHas2DigitYear: Boolean = false + ) private val parametersForExaminePatternTest = listOf( @@ -141,47 +142,46 @@ internal class TimestampFormatPatternParserTest { MostPreciseFieldTestCase("M d, y", TimestampField.DAY_OF_MONTH), - //Delimited with "/" + // Delimited with "/" MostPreciseFieldTestCase("y/M", TimestampField.MONTH_OF_YEAR), MostPreciseFieldTestCase("y/M/d", TimestampField.DAY_OF_MONTH), MostPreciseFieldTestCase("y/M/d/s", TimestampField.SECOND_OF_MINUTE), - //delimited with "-" + // delimited with "-" MostPreciseFieldTestCase("y-M", TimestampField.MONTH_OF_YEAR), MostPreciseFieldTestCase("yy-M", TimestampField.MONTH_OF_YEAR, expectedHas2DigitYear = true), MostPreciseFieldTestCase("y-M-d", TimestampField.DAY_OF_MONTH), MostPreciseFieldTestCase("y-M-d-s", TimestampField.SECOND_OF_MINUTE), - //delimited with "':'" + // delimited with "':'" MostPreciseFieldTestCase("y:M", TimestampField.MONTH_OF_YEAR), MostPreciseFieldTestCase("yy:M", TimestampField.MONTH_OF_YEAR, expectedHas2DigitYear = true), MostPreciseFieldTestCase("y:M:d", TimestampField.DAY_OF_MONTH), MostPreciseFieldTestCase("y:M:d:s", TimestampField.SECOND_OF_MINUTE), - //delimited with "'1'" + // delimited with "'1'" MostPreciseFieldTestCase("'1'y'1'", TimestampField.YEAR), MostPreciseFieldTestCase("'1'yy'1'", TimestampField.YEAR, expectedHas2DigitYear = true), MostPreciseFieldTestCase("'1'y'1'M'1'", TimestampField.MONTH_OF_YEAR), MostPreciseFieldTestCase("'1'y'1'M'1'd'1'", TimestampField.DAY_OF_MONTH), MostPreciseFieldTestCase("'1'y'1'M'1'd'1's'1'", TimestampField.SECOND_OF_MINUTE), - //delimited with "'😸'" + // delimited with "'😸'" MostPreciseFieldTestCase("'😸'y'😸'", TimestampField.YEAR), MostPreciseFieldTestCase("'😸'yy'😸'", TimestampField.YEAR, expectedHas2DigitYear = true), MostPreciseFieldTestCase("'😸'y'😸'M'😸'", TimestampField.MONTH_OF_YEAR), MostPreciseFieldTestCase("'😸'y'😸'M'😸'd'😸'", TimestampField.DAY_OF_MONTH), MostPreciseFieldTestCase("'😸'y'😸'M'😸'd'😸's'😸'", TimestampField.SECOND_OF_MINUTE), - //delimited with "'話家'" + // delimited with "'話家'" MostPreciseFieldTestCase("'話家'y'話家'", TimestampField.YEAR), MostPreciseFieldTestCase("'話家'yy'話家'", TimestampField.YEAR, expectedHas2DigitYear = true), MostPreciseFieldTestCase("'話家'y'話家'M'話家'", TimestampField.MONTH_OF_YEAR), MostPreciseFieldTestCase("'話家'y'話家'M'話家'd'話家'", TimestampField.DAY_OF_MONTH), MostPreciseFieldTestCase("'話家'y'話家'M'話家'd'話家's'話家'", TimestampField.SECOND_OF_MINUTE), - //Valid symbols within quotes 
should not influence the result + // Valid symbols within quotes should not influence the result MostPreciseFieldTestCase("y'M d s'", TimestampField.YEAR), - MostPreciseFieldTestCase("y'y'", TimestampField.YEAR)) - - -} \ No newline at end of file + MostPreciseFieldTestCase("y'y'", TimestampField.YEAR) + ) +} diff --git a/lang/test/org/partiql/lang/syntax/SqlParserSelectJoinTests.kt b/lang/test/org/partiql/lang/syntax/SqlParserJoinTest.kt similarity index 87% rename from lang/test/org/partiql/lang/syntax/SqlParserSelectJoinTests.kt rename to lang/test/org/partiql/lang/syntax/SqlParserJoinTest.kt index df50cdb67b..d7afcca3ed 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserSelectJoinTests.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserJoinTest.kt @@ -20,9 +20,10 @@ class SqlParserJoinTest : SqlParserTestBase() { joinType, scan(id("stuff"), "s"), scan(id("foo"), "f"), - joinPredicate), - where = wherePredicate) - + joinPredicate + ), + where = wherePredicate + ) private fun PartiqlAst.Builder.selectWithFromSource(fromSource: PartiqlAst.FromSource): PartiqlAst.Expr = select(project = projectX, from = fromSource) @@ -40,7 +41,8 @@ class SqlParserJoinTest : SqlParserTestBase() { ) { selectWithOneJoin( joinType = PartiqlAst.JoinType.Right(), - joinPredicate = null) + joinPredicate = null + ) } @Test @@ -57,7 +59,8 @@ class SqlParserJoinTest : SqlParserTestBase() { ) { selectWithOneJoin( joinType = full(), - joinPredicate = eq(id("s"), id("f"))) + joinPredicate = eq(id("s"), id("f")) + ) } @Test @@ -82,8 +85,10 @@ class SqlParserJoinTest : SqlParserTestBase() { inner(), scan(id("A")), scan(id("B")), - eq(id("A"), id("B"))), - where = null) + eq(id("A"), id("B")) + ), + where = null + ) } @Test @@ -108,8 +113,10 @@ class SqlParserJoinTest : SqlParserTestBase() { inner(), scan(id("A")), scan(id("B")), - eq(id("A"), id("B"))), - where = null) + eq(id("A"), id("B")) + ), + where = null + ) } @Test @@ -137,13 +144,17 @@ class SqlParserJoinTest : SqlParserTestBase() { project = projectX, from = join( inner(), - join(inner(), + join( + inner(), scan(id("A")), scan(id("B")), - eq(id("A"), id("B"))), + eq(id("A"), id("B")) + ), scan(id("C")), - eq(id("B"), id("C"))), - where = null) + eq(id("B"), id("C")) + ), + where = null + ) } @Test @@ -171,13 +182,17 @@ class SqlParserJoinTest : SqlParserTestBase() { project = projectX, from = join( inner(), - join(inner(), + join( + inner(), scan(id("B")), scan(id("C")), - eq(id("B"), id("C"))), + eq(id("B"), id("C")) + ), scan(id("A")), - eq(id("A"), id("B"))), - where = null) + eq(id("A"), id("B")) + ), + where = null + ) } @Test @@ -210,16 +225,22 @@ class SqlParserJoinTest : SqlParserTestBase() { project = projectX, from = join( inner(), - join(inner(), - join(inner(), + join( + inner(), + join( + inner(), scan(id("C")), scan(id("D")), - eq(id("C"), id("D"))), + eq(id("C"), id("D")) + ), scan(id("B")), - eq(id("B"), id("C"))), + eq(id("B"), id("C")) + ), scan(id("A")), - eq(id("A"), id("B"))), - where = null) + eq(id("A"), id("B")) + ), + where = null + ) } @Test @@ -242,8 +263,10 @@ class SqlParserJoinTest : SqlParserTestBase() { inner(), scan(id("A")), scan(lit(ionInt(1))), - lit(ionBool(true))), - where = null) + lit(ionBool(true)) + ), + where = null + ) } @Test @@ -277,8 +300,10 @@ class SqlParserJoinTest : SqlParserTestBase() { where = null ) ), - lit(ionBool(true))), - where = null) + lit(ionBool(true)) + ), + where = null + ) } private val deeplyNestedJoins = PartiqlAst.build { @@ -294,15 +319,20 @@ class SqlParserJoinTest : 
SqlParserTestBase() { inner(), scan(id("a")), scan(id("b")), - null), + null + ), scan(id("c")), - null), + null + ), scan(id("d")), - id("e")), + id("e") + ), scan(id("f")), - null), + null + ), scan(id("g")), - id("h")) + id("h") + ) } @Test diff --git a/lang/test/org/partiql/lang/util/ArgumentsProvider.kt b/lang/test/org/partiql/lang/util/ArgumentsProviderBase.kt similarity index 100% rename from lang/test/org/partiql/lang/util/ArgumentsProvider.kt rename to lang/test/org/partiql/lang/util/ArgumentsProviderBase.kt From a22e6e0d7834fdf671ee1f8edfd8cdb62a35c9f7 Mon Sep 17 00:00:00 2001 From: Alan Cai Date: Wed, 2 Mar 2022 17:57:15 -0800 Subject: [PATCH 8/8] Adds files reformatted by ktlintFormat --- cli/src/org/partiql/cli/Cli.kt | 22 +- cli/src/org/partiql/cli/Repl.kt | 84 +- .../org/partiql/cli/functions/BaseFunction.kt | 15 +- cli/src/org/partiql/cli/functions/ReadFile.kt | 19 +- .../org/partiql/cli/functions/WriteFile.kt | 9 +- cli/src/org/partiql/cli/main.kt | 26 +- cli/test/org/partiql/cli/CliTest.kt | 16 +- cli/test/org/partiql/cli/ReplTest.kt | 59 +- .../org/partiql/cli/functions/ReadFileTest.kt | 26 +- .../partiql/cli/functions/WriteFileTest.kt | 6 +- .../examples/CustomProceduresExample.kt | 22 +- .../examples/EvaluationWithBindings.kt | 2 +- .../PartialEvaluationVisitorTransform.kt | 4 +- .../examples/SimpleExpressionEvaluation.kt | 2 +- .../org/partiql/examples/BaseExampleTest.kt | 2 +- .../partiql/examples/CSVJavaExampleTest.kt | 5 +- .../examples/CsvExprValueExampleTest.kt | 2 +- .../examples/CustomFunctionsExampleTest.kt | 2 +- .../examples/EvaluationWithBindingsTest.kt | 2 +- .../EvaluationWithLazyBindingsTest.kt | 2 +- .../examples/ParserErrorExampleTest.kt | 2 +- .../examples/PreventJoinVisitorExampleTest.kt | 2 +- .../SimpleExpressionEvaluationTest.kt | 2 +- lang/src/org/partiql/lang/CompilerPipeline.kt | 17 +- .../lang/ast/AggregateCallSiteListMeta.kt | 4 +- .../partiql/lang/ast/AstDeserialization.kt | 275 +- .../org/partiql/lang/ast/AstSerialization.kt | 136 +- .../partiql/lang/ast/ExprNodeToStatement.kt | 92 +- .../src/org/partiql/lang/ast/InternalMetas.kt | 7 +- .../org/partiql/lang/ast/IsCountStarMeta.kt | 2 +- .../org/partiql/lang/ast/IsImplictJoinMeta.kt | 4 +- .../org/partiql/lang/ast/IsIonLiteralMeta.kt | 4 +- .../partiql/lang/ast/LegacyLogicalNotMeta.kt | 1 - .../lang/ast/MemoizedMetaDeserializer.kt | 2 +- .../partiql/lang/ast/SourceLocationMeta.kt | 10 +- .../partiql/lang/ast/StatementToExprNode.kt | 111 +- lang/src/org/partiql/lang/ast/Util.kt | 9 +- lang/src/org/partiql/lang/ast/ast.kt | 156 +- lang/src/org/partiql/lang/ast/meta.kt | 21 +- .../lang/ast/passes/AstRewriterBase.kt | 647 +- .../org/partiql/lang/ast/passes/AstVisitor.kt | 2 +- .../org/partiql/lang/ast/passes/AstWalker.kt | 6 +- .../lang/ast/passes/SemanticException.kt | 3 +- .../lang/ast/passes/SemanticProblemDetails.kt | 10 +- .../lang/ast/passes/StatementRedactor.kt | 66 +- .../passes/inference/StaticTypeInferencer.kt | 9 +- lang/src/org/partiql/lang/domains/util.kt | 3 +- .../lang/errors/ErrorAndErrorContexts.kt | 11 - lang/src/org/partiql/lang/errors/ErrorCode.kt | 548 +- .../org/partiql/lang/errors/ProblemHandler.kt | 4 +- .../org/partiql/lang/eval/AnyOfCastTable.kt | 16 +- .../org/partiql/lang/eval/BaseExprValue.kt | 1 - lang/src/org/partiql/lang/eval/Bindings.kt | 34 +- .../partiql/lang/eval/BindingsExtensions.kt | 4 +- .../org/partiql/lang/eval/CompileOptions.kt | 13 +- lang/src/org/partiql/lang/eval/Environment.kt | 10 +- .../org/partiql/lang/eval/ErrorSignaler.kt | 20 +- 
.../partiql/lang/eval/EvaluatingCompiler.kt | 137 +- .../partiql/lang/eval/EvaluationSession.kt | 16 +- lang/src/org/partiql/lang/eval/Exceptions.kt | 65 +- .../src/org/partiql/lang/eval/ExprFunction.kt | 4 +- .../partiql/lang/eval/ExprNodeExtensions.kt | 3 +- .../partiql/lang/eval/ExprValueExtensions.kt | 147 +- .../org/partiql/lang/eval/ExprValueFactory.kt | 78 +- .../org/partiql/lang/eval/ExprValueType.kt | 37 +- lang/src/org/partiql/lang/eval/Group.kt | 2 +- .../partiql/lang/eval/GroupKeyExprValue.kt | 9 +- .../partiql/lang/eval/IonStructBindings.kt | 9 +- .../org/partiql/lang/eval/OrdinalBindings.kt | 1 - .../partiql/lang/eval/PartiqlAstExtensions.kt | 7 +- .../org/partiql/lang/eval/StructExprValue.kt | 13 +- lang/src/org/partiql/lang/eval/Thunk.kt | 76 +- .../partiql/lang/eval/binding/LocalsBinder.kt | 36 +- .../lang/eval/builtins/BuiltinFunctions.kt | 4 +- .../partiql/lang/eval/builtins/Constants.kt | 36 +- .../lang/eval/builtins/DateAddExprFunction.kt | 88 +- .../eval/builtins/DateDiffExprFunction.kt | 36 +- .../lang/eval/builtins/ExtractExprFunction.kt | 23 +- .../eval/builtins/MakeDateExprFunction.kt | 5 +- .../eval/builtins/MakeTimeExprFunction.kt | 2 +- .../lang/eval/builtins/SizeExprFunction.kt | 2 +- .../eval/builtins/SubstringExprFunction.kt | 4 +- .../lang/eval/builtins/TimestampParser.kt | 139 +- .../builtins/TimestampTemporalAccessor.kt | 19 +- .../eval/builtins/ToStringExprFunction.kt | 20 +- .../eval/builtins/ToTimestampExprFunction.kt | 10 +- .../lang/eval/builtins/TrimExprFunction.kt | 41 +- .../eval/builtins/UnixTimestampFunction.kt | 2 +- .../storedprocedure/StoredProcedure.kt | 2 +- .../eval/builtins/timestamp/FormatItem.kt | 5 +- .../eval/builtins/timestamp/FormatPattern.kt | 62 +- .../timestamp/TimestampFormatPatternLexer.kt | 46 +- .../timestamp/TimestampFormatPatternParser.kt | 55 +- .../partiql/lang/eval/io/DelimitedValues.kt | 42 +- .../lang/eval/like/CheckpointIterator.kt | 2 - .../lang/eval/like/CheckpointIteratorImpl.kt | 5 +- .../eval/like/CodepointCheckpointIterator.kt | 4 +- .../org/partiql/lang/eval/like/PatternPart.kt | 73 +- lang/src/org/partiql/lang/eval/time/Time.kt | 37 +- .../partiql/lang/eval/time/TimeExtensions.kt | 20 +- .../AggregateSupportVisitorTransform.kt | 3 +- .../visitors/CustomTypeVisitorTransform.kt | 4 +- .../FromSourceAliasVisitorTransform.kt | 8 +- .../GroupByItemAliasVisitorTransform.kt | 28 +- .../GroupByPathExpressionVisitorTransform.kt | 41 +- .../visitors/PartiqlAstSanityValidator.kt | 42 +- .../visitors/PipelinedVisitorTransform.kt | 2 +- .../SelectListItemAliasVisitorTransform.kt | 2 +- .../visitors/SelectStarVisitorTransform.kt | 18 +- .../visitors/StaticTypeVisitorTransform.kt | 73 +- .../visitors/SubstitutionVisitorTransform.kt | 6 +- .../eval/visitors/VisitorTransformBase.kt | 6 +- .../partiql/lang/mappers/StaticTypeMapper.kt | 32 +- .../lang/partiqlisl/ResourceAuthority.kt | 12 +- .../schemadiscovery/ConstraintDiscoverer.kt | 30 +- .../lang/schemadiscovery/ConstraintInferer.kt | 7 +- .../lang/schemadiscovery/ConstraintUnifier.kt | 55 +- .../DiscoveredConstraintUnifier.kt | 25 +- .../lang/schemadiscovery/IonExampleParser.kt | 4 +- .../NormalizeNullableVisitorTransform.kt | 2 +- .../SchemaInferencerFromExampleImpl.kt | 15 +- .../src/org/partiql/lang/syntax/Exceptions.kt | 38 +- .../org/partiql/lang/syntax/LexerConstants.kt | 236 +- .../org/partiql/lang/syntax/SourcePosition.kt | 1 - lang/src/org/partiql/lang/syntax/SqlLexer.kt | 110 +- lang/src/org/partiql/lang/syntax/SqlParser.kt | 428 +- 
lang/src/org/partiql/lang/syntax/Token.kt | 10 +- lang/src/org/partiql/lang/syntax/TokenType.kt | 1 - lang/src/org/partiql/lang/types/CustomType.kt | 3 +- .../partiql/lang/types/FunctionSignature.kt | 4 +- .../lang/types/PartiqlAstTypeExtensions.kt | 18 +- lang/src/org/partiql/lang/types/StaticType.kt | 77 +- .../partiql/lang/types/TypedOpParameter.kt | 2 +- .../org/partiql/lang/util/BindingHelpers.kt | 22 +- .../partiql/lang/util/CollectionExtensions.kt | 11 +- .../partiql/lang/util/ExprValueFormatter.kt | 21 +- .../org/partiql/lang/util/FacetExtensions.kt | 1 - .../partiql/lang/util/IonValueExtensions.kt | 75 +- .../org/partiql/lang/util/IonWriterContext.kt | 8 +- .../org/partiql/lang/util/LongExtensions.kt | 6 +- .../org/partiql/lang/util/NumberExtensions.kt | 59 +- .../partiql/lang/util/PropertyMapHelpers.kt | 11 +- .../partiql/lang/util/ThreadInterruptUtils.kt | 2 +- .../partiql/lang/util/TokenListExtensions.kt | 10 +- .../org/partiql/lang/util/WhenAsExpression.kt | 5 +- .../partiql/lang/CustomTypeTestFixtures.kt | 6 +- lang/test/org/partiql/lang/Ion.kt | 1 - lang/test/org/partiql/lang/TestBase.kt | 84 +- lang/test/org/partiql/lang/ast/AstNodeTest.kt | 148 +- .../partiql/lang/ast/IsIonLiteralMetaTest.kt | 2 +- .../partiql/lang/ast/PathComponentExprTest.kt | 9 +- .../lang/ast/SerializationRoundTripTests.kt | 6 +- .../lang/ast/SourceLocationMetaTest.kt | 2 +- .../partiql/lang/ast/VariableReferenceTest.kt | 5 +- .../lang/ast/passes/AstRewriterBaseTest.kt | 1 - .../partiql/lang/ast/passes/AstWalkerTests.kt | 74 +- .../lang/ast/passes/RewriterTestBase.kt | 11 +- .../lang/ast/passes/StatementRedactorTest.kt | 224 +- .../passes/inference/StaticTypeCastTests.kt | 9 +- .../PartiqlAstToExprNodeRoundTripTests.kt | 1 - .../partiql/lang/errors/LexerErrorsTest.kt | 39 +- .../partiql/lang/errors/ParserErrorsTest.kt | 2275 ++++--- .../lang/errors/PropertyValueMapTest.kt | 7 +- .../partiql/lang/errors/SqlExceptionTest.kt | 2 +- .../org/partiql/lang/eval/BindingsTest.kt | 30 +- .../lang/eval/CoalesceEvaluationTest.kt | 93 +- .../test/org/partiql/lang/eval/CompOptions.kt | 52 +- .../partiql/lang/eval/ErrorSignalerTests.kt | 7 +- .../EvaluatingCompilerCustomTypeCastTests.kt | 26 +- .../eval/EvaluatingCompilerDateTimeTests.kt | 43 +- .../eval/EvaluatingCompilerExceptionsTest.kt | 137 +- .../lang/eval/EvaluatingCompilerExecTests.kt | 97 +- .../eval/EvaluatingCompilerFromLetTests.kt | 82 +- .../EvaluatingCompilerFromSourceByTests.kt | 34 +- .../eval/EvaluatingCompilerGroupByTest.kt | 1025 +-- .../lang/eval/EvaluatingCompilerHavingTest.kt | 33 +- .../lang/eval/EvaluatingCompilerInTests.kt | 8 +- .../lang/eval/EvaluatingCompilerIntTest.kt | 23 +- .../lang/eval/EvaluatingCompilerIsTests.kt | 22 +- .../lang/eval/EvaluatingCompilerLimitTests.kt | 8 +- .../EvaluatingCompilerNAryIntOverflowTests.kt | 36 +- .../lang/eval/EvaluatingCompilerNAryTests.kt | 42 +- .../eval/EvaluatingCompilerOffsetTests.kt | 6 +- .../eval/EvaluatingCompilerSelectStarTests.kt | 31 +- .../EvaluatingCompilerUnknownValuesTest.kt | 177 +- .../lang/eval/EvaluationSessionTest.kt | 8 +- .../lang/eval/EvaluatorErrorTestCase.kt | 2 +- .../lang/eval/EvaluatorStaticTypeTests.kt | 23 +- .../partiql/lang/eval/EvaluatorTestBase.kt | 228 +- .../partiql/lang/eval/EvaluatorTestCase.kt | 5 +- .../partiql/lang/eval/EvaluatorTestSuite.kt | 368 +- .../org/partiql/lang/eval/EvaluatorTests.kt | 7 +- .../lang/eval/ExceptionWrappingTest.kt | 2 +- .../partiql/lang/eval/ExprValueFactoryTest.kt | 65 +- .../lang/eval/JoinWithOnConditionTest.kt | 79 +- 
.../partiql/lang/eval/LikePredicateTest.kt | 468 +- .../eval/NaturalExprValueComparatorsTest.kt | 32 +- .../org/partiql/lang/eval/NodeMetadataTest.kt | 38 +- .../partiql/lang/eval/NullIfEvaluationTest.kt | 7 +- .../lang/eval/QuotedIdentifierTests.kt | 37 +- .../eval/SimpleEvaluatingCompilerTests.kt | 7 +- .../partiql/lang/eval/ThunkFactoryTests.kt | 13 +- .../org/partiql/lang/eval/TypingModeTests.kt | 10 +- .../eval/builtins/InvalidArgTypeChecker.kt | 33 +- .../lang/eval/builtins/TimestampExtensions.kt | 45 +- .../lang/eval/builtins/TimestampParserTest.kt | 5 +- .../TimestampTemporalAccessorTests.kt | 4 +- .../functions/CharLengthEvaluationTest.kt | 3 +- .../CharacterLengthEvaluationTest.kt | 2 +- .../functions/ConcatEvaluationTest.kt | 72 +- .../functions/DateDiffEvaluationTest.kt | 5 +- .../functions/ExtractEvaluationTest.kt | 2 +- .../functions/MakeDateEvaluationTest.kt | 2 +- .../functions/MakeTimeEvaluationTest.kt | 2 +- .../builtins/functions/SizeEvaluationTest.kt | 2 +- .../functions/SubstringEvaluationTest.kt | 2 +- .../functions/ToStringEvaluationTest.kt | 8 +- .../functions/ToTimestampEvaluationTest.kt | 3 +- .../builtins/functions/TrimEvaluationTest.kt | 2 +- .../functions/UnixTimestampFunctionTest.kt | 2 +- .../TimestampFormatPatternLexerTest.kt | 148 +- .../ToTimestampFormatPatternValidationTest.kt | 66 +- .../eval/io/CustomExceptionHandlerTest.kt | 49 +- .../lang/eval/io/DelimitedValuesTest.kt | 42 +- .../lang/eval/like/PatternPartTests.kt | 224 +- .../org/partiql/lang/eval/time/TimeTest.kt | 6 +- .../AggregateSupportVisitorTransformTests.kt | 44 +- .../FromSourceAliasVisitorTransformTests.kt | 283 +- .../PartiqlAstSanityValidatorTests.kt | 115 +- .../SelectStarVisitorTransformTests.kt | 4 +- ...StaticTypeInferenceVisitorTransformTest.kt | 5563 +++++++++-------- .../StaticTypeVisitorTransformTests.kt | 414 +- .../SubstitutionVisitorTransformTest.kt | 3 +- .../eval/visitors/VisitorTransformTestBase.kt | 8 +- .../partiql/lang/mappers/E2EMapperTests.kt | 3841 ++++++++---- lang/test/org/partiql/lang/mockdb/MockDb.kt | 6 +- .../lang/partiqlisl/PartiQLISLSchemaTests.kt | 87 +- .../SchemaInferencerFromExampleTests.kt | 235 +- .../org/partiql/lang/syntax/SqlLexerTest.kt | 3 +- .../partiql/lang/syntax/SqlParserCastTests.kt | 12 +- .../syntax/SqlParserCorrelatedJoinTests.kt | 41 +- .../syntax/SqlParserCustomTypeCatalogTests.kt | 10 +- .../lang/syntax/SqlParserDateTimeTests.kt | 16 +- .../lang/syntax/SqlParserPrecedenceTest.kt | 1591 ++--- .../org/partiql/lang/syntax/SqlParserTest.kt | 308 +- .../partiql/lang/syntax/SqlParserTestBase.kt | 21 +- .../partiql/lang/thread/EndlessTokenList.kt | 4 +- .../lang/thread/ThreadInterruptedTests.kt | 7 +- .../org/partiql/lang/types/StaticTypeTests.kt | 95 +- .../partiql/lang/util/AssertJExtensions.kt | 2 +- .../org/partiql/lang/util/AssertionHelpers.kt | 2 +- .../org/partiql/lang/util/AstExtensions.kt | 10 +- .../partiql/lang/util/BindingsExtensions.kt | 3 - .../partiql/lang/util/CollectionsListTests.kt | 6 +- .../lang/util/CompileOptionsExtensions.kt | 1 - .../ConfigurableExprValueFormatterTest.kt | 20 +- lang/test/org/partiql/lang/util/CrossMap.kt | 2 +- .../partiql/lang/util/ErrorContextHelpers.kt | 2 - .../lang/util/ExprValueFactoryExtensions.kt | 2 +- .../partiql/lang/util/LongExtensionsTest.kt | 2 +- .../test/org/partiql/lang/util/NumbersTest.kt | 2 +- .../org/partiql/lang/util/SchemaHelpers.kt | 1 - .../partiql/lang/util/SexpAstPrettyPrinter.kt | 13 +- .../lang/util/testdsl/ExprNodeTestCase.kt | 6 +- 
.../partiql/lang/util/testdsl/GroupBuilder.kt | 7 +- .../lang/util/testdsl/IonResultTestCase.kt | 6 +- .../lang/util/testdsl/IonResultTestGroup.kt | 2 +- .../lang/util/testdsl/IonResultTestSuite.kt | 8 +- .../partiql/lang/util/testdsl/SuiteBuilder.kt | 8 +- .../lang/util/testdsl/TestDslMarker.kt | 2 +- pts/test/org/partiql/lang/pts/PtsTest.kt | 3 +- .../org/partiql/testscript/PtsException.kt | 6 +- .../src/org/partiql/testscript/Result.kt | 18 +- .../testscript/compiler/CompilerErrors.kt | 42 +- .../testscript/compiler/CompilerException.kt | 1 - .../testscript/evaluator/TestResult.kt | 7 +- .../testscript/extensions/FileExtensions.kt | 2 +- .../testscript/extensions/IonExtensions.kt | 6 +- .../src/org/partiql/testscript/parser/Ion.kt | 24 +- .../testscript/parser/NamedInputStream.kt | 6 +- .../partiql/testscript/parser/ParserError.kt | 55 +- .../testscript/parser/ParserException.kt | 2 +- .../testscript/parser/ScriptLocation.kt | 2 +- .../partiql/testscript/parser/ast/AstNode.kt | 34 +- .../testscript/parser/ast/MacroNodes.kt | 14 +- .../parser/ast/builders/AppendTestBuilder.kt | 10 +- .../org/partiql/testscript/SpecConverter.kt | 7 +- .../testscript/compiler/CompilerTest.kt | 655 +- .../evaluator/DefaultPtsEqualityTest.kt | 178 +- .../testscript/parser/BaseParseTests.kt | 10 +- .../testscript/parser/ParserForTests.kt | 224 +- .../testscript/parser/ParserMacroTests.kt | 297 +- .../org/partiql/testscript/parser/util.kt | 13 +- 293 files changed, 15398 insertions(+), 11671 deletions(-) diff --git a/cli/src/org/partiql/cli/Cli.kt b/cli/src/org/partiql/cli/Cli.kt index e268d08df7..0ce78201ac 100644 --- a/cli/src/org/partiql/cli/Cli.kt +++ b/cli/src/org/partiql/cli/Cli.kt @@ -32,17 +32,19 @@ import java.io.OutputStreamWriter /** * TODO builder, kdoc */ -internal class Cli(private val valueFactory: ExprValueFactory, - private val input: InputStream, - private val output: OutputStream, - private val format: OutputFormat, - private val compilerPipeline: CompilerPipeline, - private val globals: Bindings, - private val query: String) : PartiQLCommand { +internal class Cli( + private val valueFactory: ExprValueFactory, + private val input: InputStream, + private val output: OutputStream, + private val format: OutputFormat, + private val compilerPipeline: CompilerPipeline, + private val globals: Bindings, + private val query: String +) : PartiQLCommand { companion object { val ionTextWriterBuilder: IonTextWriterBuilder = IonTextWriterBuilder.standard() - .withWriteTopLevelValuesOnNewLines(true) + .withWriteTopLevelValuesOnNewLines(true) } override fun run() { @@ -51,7 +53,7 @@ internal class Cli(private val valueFactory: ExprValueFactory, val inputExprValue = valueFactory.newBag(inputIonValue) val bindings = Bindings.buildLazyBindings { // If `input` is a class of `EmptyInputStream`, it means there is no input data provided by user. 
- if (input !is EmptyInputStream){ addBinding("input_data") { inputExprValue } } + if (input !is EmptyInputStream) { addBinding("input_data") { inputExprValue } } }.delegate(globals) val result = compilerPipeline.compile(query).eval(EvaluationSession.build { globals(bindings) }) @@ -74,7 +76,7 @@ internal class Cli(private val valueFactory: ExprValueFactory, when (value.type) { // writes top level bags as a datagram ExprValueType.BAG -> value.iterator().forEach { v -> v.ionValue.writeTo(ionWriter) } - else -> value.ionValue.writeTo(ionWriter) + else -> value.ionValue.writeTo(ionWriter) } } } diff --git a/cli/src/org/partiql/cli/Repl.kt b/cli/src/org/partiql/cli/Repl.kt index de8d9d6c3b..1ba4ab4c7a 100644 --- a/cli/src/org/partiql/cli/Repl.kt +++ b/cli/src/org/partiql/cli/Repl.kt @@ -84,7 +84,7 @@ private class GlobalBinding(private val valueFactory: ExprValueFactory) { } Bindings.empty() -> { } // nothing to do - else -> throw IllegalArgumentException("Invalid binding type for global environment: $bindings") + else -> throw IllegalArgumentException("Invalid binding type for global environment: $bindings") } return this @@ -112,24 +112,27 @@ interface Timer { /** * TODO builder, kdoc */ -internal class Repl(private val valueFactory: ExprValueFactory, - input: InputStream, - output: OutputStream, - private val parser: Parser, - private val compiler: CompilerPipeline, - initialGlobal: Bindings, - private val timer: Timer = object : Timer {} +internal class Repl( + private val valueFactory: ExprValueFactory, + input: InputStream, + output: OutputStream, + private val parser: Parser, + private val compiler: CompilerPipeline, + initialGlobal: Bindings, + private val timer: Timer = object : Timer {} ) : PartiQLCommand { private val outputWriter = OutputStreamWriter(output, "UTF-8") private inner class ReplCommands { operator fun get(commandName: String): (String) -> ExprValue? = commands[commandName] - ?: throw IllegalArgumentException("REPL command: '$commandName' not found! " + "use '!list_commands' to see all available commands") + ?: throw IllegalArgumentException("REPL command: '$commandName' not found! " + "use '!list_commands' to see all available commands") - private val commands: Map ExprValue?> = mapOf("add_to_global_env" to ::addToGlobalEnv, - "global_env" to ::globalEnv, - "list_commands" to ::listCommands) + private val commands: Map ExprValue?> = mapOf( + "add_to_global_env" to ::addToGlobalEnv, + "global_env" to ::globalEnv, + "list_commands" to ::listCommands + ) private fun addToGlobalEnv(source: String): ExprValue? { if (source == "") { @@ -147,12 +150,14 @@ internal class Repl(private val valueFactory: ExprValueFactory, private fun listCommands(@Suppress("UNUSED_PARAMETER") source: String): ExprValue? 
{ outputWriter.write("\n") - outputWriter.write(""" + outputWriter.write( + """ |!add_to_global_env: adds a value to the global environment |!global_env: displays the current global environment |!list_commands: print this message | - """.trimMargin()) + """.trimMargin() + ) return null } } @@ -162,8 +167,7 @@ internal class Repl(private val valueFactory: ExprValueFactory, val splitIndex = source.indexOfFirst { it == ' ' }.let { if (it == -1) { source.length - } - else { + } else { it } } @@ -213,7 +217,7 @@ internal class Repl(private val valueFactory: ExprValueFactory, private fun printPrompt() { when { buffer.isEmpty() -> outputWriter.write(PROMPT_1) - else -> outputWriter.write(PROMPT_2) + else -> outputWriter.write(PROMPT_2) } outputWriter.flush() } @@ -244,8 +248,7 @@ internal class Repl(private val valueFactory: ExprValueFactory, outputWriter.write("OK!") outputWriter.write("\n") outputWriter.flush() - } - catch (e: Exception) { + } catch (e: Exception) { e.printStackTrace(PrintWriter(outputWriter)) outputWriter.write("ERROR!") outputWriter.write("\n") @@ -253,8 +256,7 @@ internal class Repl(private val valueFactory: ExprValueFactory, return if (line == null) { ReplState.FINAL - } - else { + } else { ReplState.READY } } @@ -264,8 +266,7 @@ internal class Repl(private val valueFactory: ExprValueFactory, val locals = Bindings.buildLazyBindings { addBinding("_") { previousResult } }.delegate(globals.bindings) compiler.compile(source).eval(EvaluationSession.build { globals(locals) }) - } - else { + } else { null } } @@ -275,8 +276,7 @@ internal class Repl(private val valueFactory: ExprValueFactory, val astStatementSexp = parser.parseAstStatement(source).toIonElement() val astStatmentIonValue = astStatementSexp.asAnyElement().toIonValue(valueFactory.ion) valueFactory.newFromIonValue(astStatmentIonValue) - } - else { + } else { null } } @@ -284,56 +284,56 @@ internal class Repl(private val valueFactory: ExprValueFactory, override fun run() { while (state != ReplState.FINAL) { state = when (state) { - ReplState.INIT -> { + ReplState.INIT -> { printWelcomeMessage() printVersionNumber() ReplState.READY } - ReplState.READY -> { + ReplState.READY -> { line = readLine() when { - line == null -> ReplState.FINAL + line == null -> ReplState.FINAL arrayOf("!!", "").any { it == line } -> ReplState.EXECUTE_PARTIQL - line!!.startsWith("!") -> ReplState.READ_REPL_COMMAND - line!!.endsWith(";") -> ReplState.LAST_PARTIQL_LINE - else -> ReplState.READ_PARTIQL + line!!.startsWith("!") -> ReplState.READ_REPL_COMMAND + line!!.endsWith(";") -> ReplState.LAST_PARTIQL_LINE + else -> ReplState.READ_PARTIQL } } - ReplState.READ_PARTIQL -> { + ReplState.READ_PARTIQL -> { buffer.appendln(line) line = readLine() when { - line == null -> ReplState.FINAL - line == "" -> ReplState.EXECUTE_PARTIQL + line == null -> ReplState.FINAL + line == "" -> ReplState.EXECUTE_PARTIQL line!!.endsWith(";") -> ReplState.LAST_PARTIQL_LINE - line == "!!" -> ReplState.PARSE_PARTIQL_WITH_FILTER - else -> ReplState.READ_PARTIQL + line == "!!" 
-> ReplState.PARSE_PARTIQL_WITH_FILTER + else -> ReplState.READ_PARTIQL } } - ReplState.LAST_PARTIQL_LINE -> { + ReplState.LAST_PARTIQL_LINE -> { buffer.appendln(line) ReplState.EXECUTE_PARTIQL } - ReplState.READ_REPL_COMMAND -> { + ReplState.READ_REPL_COMMAND -> { buffer.appendln(line) line = readLine() when (line) { null -> ReplState.FINAL - "" -> ReplState.EXECUTE_REPL_COMMAND + "" -> ReplState.EXECUTE_REPL_COMMAND else -> ReplState.READ_REPL_COMMAND } } - ReplState.EXECUTE_PARTIQL -> executePartiQL() + ReplState.EXECUTE_PARTIQL -> executePartiQL() ReplState.PARSE_PARTIQL_WITH_FILTER -> parsePartiQLWithFilters() - ReplState.EXECUTE_REPL_COMMAND -> executeReplCommand() + ReplState.EXECUTE_REPL_COMMAND -> executeReplCommand() // shouldn't really happen - ReplState.FINAL -> ReplState.FINAL + ReplState.FINAL -> ReplState.FINAL } } } diff --git a/cli/src/org/partiql/cli/functions/BaseFunction.kt b/cli/src/org/partiql/cli/functions/BaseFunction.kt index 682ecbe2e5..1adeb040cf 100644 --- a/cli/src/org/partiql/cli/functions/BaseFunction.kt +++ b/cli/src/org/partiql/cli/functions/BaseFunction.kt @@ -20,20 +20,21 @@ import org.partiql.lang.eval.ExprValue import org.partiql.lang.eval.ExprValueFactory internal abstract class BaseFunction(val valueFactory: ExprValueFactory) : ExprFunction { - protected fun optionsStruct(requiredArity: Int, - args: List, - optionsIndex: Int = requiredArity): IonStruct = when (args.size) { - requiredArity -> valueFactory.ion.newEmptyStruct() + protected fun optionsStruct( + requiredArity: Int, + args: List, + optionsIndex: Int = requiredArity + ): IonStruct = when (args.size) { + requiredArity -> valueFactory.ion.newEmptyStruct() requiredArity + 1 -> extractOptVal(args, optionsIndex) - else -> throw IllegalArgumentException("Bad number of arguments: ${args.size}") + else -> throw IllegalArgumentException("Bad number of arguments: ${args.size}") } private fun extractOptVal(args: List, optionsIndex: Int): IonStruct { val optVal = args[optionsIndex].ionValue return when (optVal) { is IonStruct -> optVal - else -> throw IllegalArgumentException("Invalid option: $optVal") + else -> throw IllegalArgumentException("Invalid option: $optVal") } } } - diff --git a/cli/src/org/partiql/cli/functions/ReadFile.kt b/cli/src/org/partiql/cli/functions/ReadFile.kt index 57980e68a4..9e7c14199f 100644 --- a/cli/src/org/partiql/cli/functions/ReadFile.kt +++ b/cli/src/org/partiql/cli/functions/ReadFile.kt @@ -40,8 +40,8 @@ internal class ReadFile(valueFactory: ExprValueFactory) : BaseFunction(valueFact ) private fun conversionModeFor(name: String) = - ConversionMode.values().find { it.name.toLowerCase() == name } ?: - throw IllegalArgumentException( "Unknown conversion: $name") + ConversionMode.values().find { it.name.toLowerCase() == name } + ?: throw IllegalArgumentException("Unknown conversion: $name") private fun fileReadHandler(csvFormat: CSVFormat): (InputStream, IonStruct) -> ExprValue = { input, options -> val encoding = options["encoding"]?.stringValue() ?: "UTF-8" @@ -58,13 +58,13 @@ internal class ReadFile(valueFactory: ExprValueFactory) : BaseFunction(valueFact val quote = options["quote"]?.stringValue()?.first() // CSVParser library only accepts a single character as quote val csvFormatWithOptions = csvFormat.withIgnoreEmptyLines(ignoreEmptyLine) - .withIgnoreSurroundingSpaces(ignoreSurroundingSpace) - .withTrim(trim) - .let { if (hasHeader) it.withFirstRecordAsHeader() else it } - .let { if (delimiter != null) it.withDelimiter(delimiter) else it } - .let { if (record != null) 
it.withRecordSeparator(record) else it } - .let { if (escape != null) it.withEscape(escape) else it } - .let { if (quote != null) it.withQuote(quote) else it } + .withIgnoreSurroundingSpaces(ignoreSurroundingSpace) + .withTrim(trim) + .let { if (hasHeader) it.withFirstRecordAsHeader() else it } + .let { if (delimiter != null) it.withDelimiter(delimiter) else it } + .let { if (record != null) it.withRecordSeparator(record) else it } + .let { if (escape != null) it.withEscape(escape) else it } + .let { if (quote != null) it.withQuote(quote) else it } DelimitedValues.exprValue(valueFactory, reader, csvFormatWithOptions, conversionModeFor(conversion)) } @@ -111,4 +111,3 @@ internal class ReadFile(valueFactory: ExprValueFactory) : BaseFunction(valueFact return valueFactory.newBag(seq) } } - diff --git a/cli/src/org/partiql/cli/functions/WriteFile.kt b/cli/src/org/partiql/cli/functions/WriteFile.kt index b363b13dd1..8f5c39395b 100644 --- a/cli/src/org/partiql/cli/functions/WriteFile.kt +++ b/cli/src/org/partiql/cli/functions/WriteFile.kt @@ -60,7 +60,8 @@ internal class WriteFile(valueFactory: ExprValueFactory) : BaseFunction(valueFac private val writeHandlers = mapOf( "tsv" to delimitedWriteHandler('\t'), "csv" to delimitedWriteHandler(','), - "ion" to PRETTY_ION_WRITER) + "ion" to PRETTY_ION_WRITER + ) override fun callWithRequired(env: Environment, required: List): ExprValue { val fileName = required[0].stringValue() @@ -72,8 +73,7 @@ internal class WriteFile(valueFactory: ExprValueFactory) : BaseFunction(valueFac handler(results, it, valueFactory.ion.newEmptyStruct()) } valueFactory.newBoolean(true) - } - catch (e: Exception) { + } catch (e: Exception) { e.printStackTrace() valueFactory.newBoolean(false) } @@ -91,8 +91,7 @@ internal class WriteFile(valueFactory: ExprValueFactory) : BaseFunction(valueFac handler(results, it, options) } valueFactory.newBoolean(true) - } - catch (e: Exception) { + } catch (e: Exception) { e.printStackTrace() valueFactory.newBoolean(false) } diff --git a/cli/src/org/partiql/cli/main.kt b/cli/src/org/partiql/cli/main.kt index b31f073489..af9e447e2a 100644 --- a/cli/src/org/partiql/cli/main.kt +++ b/cli/src/org/partiql/cli/main.kt @@ -17,18 +17,18 @@ package org.partiql.cli import com.amazon.ion.system.IonSystemBuilder -import joptsimple.OptionParser import joptsimple.BuiltinHelpFormatter import joptsimple.OptionDescriptor import joptsimple.OptionException +import joptsimple.OptionParser import joptsimple.OptionSet import org.partiql.cli.functions.ReadFile import org.partiql.cli.functions.WriteFile import org.partiql.lang.CompilerPipeline -import org.partiql.lang.eval.EvaluationSession -import org.partiql.lang.eval.ExprValueFactory import org.partiql.lang.eval.Bindings +import org.partiql.lang.eval.EvaluationSession import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory import org.partiql.lang.syntax.SqlParser import java.io.File import java.io.FileInputStream @@ -129,7 +129,7 @@ fun main(args: Array) = try { System.exit(0) // print help and bail } - if(optionSet.nonOptionArguments().isNotEmpty()) { + if (optionSet.nonOptionArguments().isNotEmpty()) { throw IllegalArgumentException("Non option arguments are not allowed!") } @@ -140,23 +140,19 @@ fun main(args: Array) = try { val config = compilerPipeline.compile(configSource).eval(EvaluationSession.standard()) config.bindings } - else -> Bindings.empty() + else -> Bindings.empty() } if (optionSet.has(queryOpt)) { runCli(environment, optionSet) - } - else { + } else { 
runRepl(environment) } -} -catch (e: OptionException) { +} catch (e: OptionException) { System.err.println("${e.message}\n") optParser.printHelpOn(System.err) exitProcess(1) - -} -catch (e: Exception) { +} catch (e: Exception) { e.printStackTrace(System.err) exitProcess(1) } @@ -168,15 +164,13 @@ private fun runRepl(environment: Bindings) { private fun runCli(environment: Bindings, optionSet: OptionSet) { val input = if (optionSet.has(inputFileOpt)) { FileInputStream(optionSet.valueOf(inputFileOpt)) - } - else { + } else { EmptyInputStream() } val output = if (optionSet.has(outputFileOpt)) { FileOutputStream(optionSet.valueOf(outputFileOpt)) - } - else { + } else { UnclosableOutputStream(System.out) } diff --git a/cli/test/org/partiql/cli/CliTest.kt b/cli/test/org/partiql/cli/CliTest.kt index 12c764167c..572c544c59 100644 --- a/cli/test/org/partiql/cli/CliTest.kt +++ b/cli/test/org/partiql/cli/CliTest.kt @@ -19,7 +19,6 @@ import junit.framework.Assert.assertEquals import org.junit.After import org.junit.Before import org.junit.Test - import org.partiql.lang.CompilerPipeline import org.partiql.lang.eval.Bindings import org.partiql.lang.eval.EvaluationException @@ -48,11 +47,13 @@ class CliTest { Files.deleteIfExists(testFile.toPath()) } - private fun makeCli(query: String, - input: String? = null, - bindings: Bindings = Bindings.empty(), - outputFormat: OutputFormat = OutputFormat.ION_TEXT, - output: OutputStream = this.output) = + private fun makeCli( + query: String, + input: String? = null, + bindings: Bindings = Bindings.empty(), + outputFormat: OutputFormat = OutputFormat.ION_TEXT, + output: OutputStream = this.output + ) = Cli( valueFactory, input?.byteInputStream(Charsets.UTF_8) ?: EmptyInputStream(), @@ -60,7 +61,8 @@ class CliTest { outputFormat, compilerPipeline, bindings, - query) + query + ) private fun Cli.runAndOutput(): String { run() diff --git a/cli/test/org/partiql/cli/ReplTest.kt b/cli/test/org/partiql/cli/ReplTest.kt index a1be9c7fd0..21202f0fb9 100644 --- a/cli/test/org/partiql/cli/ReplTest.kt +++ b/cli/test/org/partiql/cli/ReplTest.kt @@ -18,12 +18,12 @@ import com.amazon.ion.system.IonSystemBuilder import org.junit.Assert import org.junit.Ignore import org.junit.Test -import org.partiql.lang.syntax.Parser import org.partiql.lang.CompilerPipeline import org.partiql.lang.eval.Bindings import org.partiql.lang.eval.EvaluationSession import org.partiql.lang.eval.ExprValue import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.syntax.Parser import org.partiql.lang.syntax.SqlParser import java.io.ByteArrayOutputStream import java.io.OutputStream @@ -112,7 +112,6 @@ private class ReplTester(bindings: Bindings = Bindings.empty()) { fun assertReplPrompt(expectedPromptText: String) { outputPhaser.register() - replThread.start() outputCollectorThread.start() @@ -153,7 +152,6 @@ private class ReplTester(bindings: Bindings = Bindings.empty()) { .map { it.removePrefix(PROMPT_1) } .map { it.removePrefix(PROMPT_2) } .map { line -> "$line\n" } // add back the \n removed in the split - } @Ignore("https://github.com/partiql/partiql-lang-kotlin/issues/266") @@ -162,7 +160,8 @@ class ReplTest { @Test fun singleQuery() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> 1+1 @@ -172,12 +171,14 @@ class ReplTest { #--- #OK! 
#PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test fun querySemiColon() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> 1+1; @@ -186,12 +187,14 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test fun multipleQuery() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> 1 + 1 @@ -207,13 +210,14 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) - + """.trimMargin("#") + ) } @Test fun astWithoutMetas() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> 1 + 1 @@ -237,12 +241,14 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test fun addToGlobalEnvAndQuery() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> !add_to_global_env {'myTable': <<{'a':1}, {'a': 2}>>} @@ -274,7 +280,8 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test @@ -285,7 +292,8 @@ class ReplTest { .eval(EvaluationSession.standard()) .bindings - ReplTester(initialBindings).assertReplPrompt(""" + ReplTester(initialBindings).assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> !global_env @@ -301,12 +309,14 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test fun dumpEmptyInitialEnv() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> !global_env @@ -316,12 +326,14 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test fun dumpEnvAfterAltering() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> !add_to_global_env {'myTable': <<{'a':1}, {'a': 2}>>} @@ -355,12 +367,14 @@ class ReplTest { #--- #OK! #PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } @Test fun listCommands() { - ReplTester().assertReplPrompt(""" + ReplTester().assertReplPrompt( + """ #Welcome to the PartiQL REPL! #Using version: $partiqlVersionAndHash #PartiQL> !list_commands @@ -371,6 +385,7 @@ class ReplTest { #!list_commands: print this message #OK! 
#PartiQL> - """.trimMargin("#")) + """.trimMargin("#") + ) } } diff --git a/cli/test/org/partiql/cli/functions/ReadFileTest.kt b/cli/test/org/partiql/cli/functions/ReadFileTest.kt index 60e80bcd2e..e6396b598b 100644 --- a/cli/test/org/partiql/cli/functions/ReadFileTest.kt +++ b/cli/test/org/partiql/cli/functions/ReadFileTest.kt @@ -35,8 +35,10 @@ class ReadFileTest { private val ion = IonSystemBuilder.standard().build() private val valueFactory = ExprValueFactory.standard(ion) private val function = ReadFile(valueFactory) - private val env = Environment(locals = Bindings.empty(), - session = EvaluationSession.standard()) + private val env = Environment( + locals = Bindings.empty(), + session = EvaluationSession.standard() + ) private fun String.exprValue() = valueFactory.newFromIonValue(ion.singleValue(this)) private fun writeFile(path: String, content: String) = File(dirPath(path)).writeText(content) @@ -58,7 +60,7 @@ class ReadFileTest { } private fun IonValue.removeAnnotations() { - when(this.type) { + when (this.type) { // Remove $partiql_missing annotation from NULL for assertions IonType.NULL -> this.removeTypeAnnotation("\$partiql_missing") IonType.DATAGRAM, @@ -111,7 +113,6 @@ class ReadFileTest { val expected = "[1, 2]" assertValues(expected, actual) - } @Test @@ -124,7 +125,6 @@ class ReadFileTest { val expected = "[{_1:\"1\",_2:\"2\"}]" assertValues(expected, actual) - } @Test @@ -137,7 +137,6 @@ class ReadFileTest { val expected = "[{_1:\"1\",_2:\"2\"}]" assertValues(expected, actual) - } @Test @@ -150,7 +149,6 @@ class ReadFileTest { val expected = "[{_1:\"1,2\",_2:\"2\"}]" assertValues(expected, actual) - } @Test @@ -163,7 +161,6 @@ class ReadFileTest { val expected = "[{_1:\"1\",_2:\"2\"},{_1:\"3\"}]" assertValues(expected, actual) - } @Test @@ -176,7 +173,6 @@ class ReadFileTest { val expected = "[{col1:\"1\",col2:\"2\"}]" assertValues(expected, actual) - } @Test @@ -189,7 +185,6 @@ class ReadFileTest { val expected = "[{_1:\"1\",_2:\"2\"}]" assertValues(expected, actual) - } @Test @@ -202,7 +197,6 @@ class ReadFileTest { val expected = "[{col1:\"1\",col2:\"2\"}]" assertValues(expected, actual) - } @Test @@ -215,7 +209,6 @@ class ReadFileTest { val expected = "[{title:\"harry potter\",category:\"book\",price:\"7.99\"}]" assertValues(expected, actual) - } @Test @@ -228,7 +221,6 @@ class ReadFileTest { val expected = "[{id:\"1\",name:\"B\\\"ob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -241,7 +233,6 @@ class ReadFileTest { val expected = "[{id:\"1\",name:\"Bob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -254,7 +245,6 @@ class ReadFileTest { val expected = "[{id:\"1\",name:\"B\\\"ob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -267,7 +257,6 @@ class ReadFileTest { val expected = "[{id:\"1\",name:\"Bob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -280,7 +269,6 @@ class ReadFileTest { val expected = "[{id:\"\"},{id:\"1\",name:\"Bob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -293,7 +281,6 @@ class ReadFileTest { val expected = "[{id:\" 1 \",name:\" Bob \",balance:\" 10000.00 \"}]" assertValues(expected, actual) - } @Test @@ -306,7 +293,6 @@ class ReadFileTest { val expected = "[{id:\"1\",name:\"Bob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -319,7 +305,6 @@ class ReadFileTest { val expected = "[{id:\"\\\"1\",name:\"Bob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } @Test @@ -332,6 +317,5 @@ class ReadFileTest { 
val expected = "[{id:\"1,\",name:\"Bob\",balance:\"10000.00\"}]" assertValues(expected, actual) - } } diff --git a/cli/test/org/partiql/cli/functions/WriteFileTest.kt b/cli/test/org/partiql/cli/functions/WriteFileTest.kt index f86386ce32..48383e6ec6 100644 --- a/cli/test/org/partiql/cli/functions/WriteFileTest.kt +++ b/cli/test/org/partiql/cli/functions/WriteFileTest.kt @@ -29,8 +29,10 @@ class WriteFileTest { private val ion = IonSystemBuilder.standard().build() private val valueFactory = ExprValueFactory.standard(ion) private val function = WriteFile(valueFactory) - private val env = Environment(locals = Bindings.empty(), - session = EvaluationSession.standard()) + private val env = Environment( + locals = Bindings.empty(), + session = EvaluationSession.standard() + ) private fun String.exprValue() = valueFactory.newFromIonValue(ion.singleValue(this)) private fun readFile(path: String) = File(dirPath(path)).readText() diff --git a/examples/src/kotlin/org/partiql/examples/CustomProceduresExample.kt b/examples/src/kotlin/org/partiql/examples/CustomProceduresExample.kt index 9435d27e16..2a082ccad1 100644 --- a/examples/src/kotlin/org/partiql/examples/CustomProceduresExample.kt +++ b/examples/src/kotlin/org/partiql/examples/CustomProceduresExample.kt @@ -33,7 +33,7 @@ private val ion = IonSystemBuilder.standard().build() * This example demonstrates how to create a custom stored procedure, check argument types, and modify the * [EvaluationSession]. */ -class CalculateCrewMoonWeight(private val valueFactory: ExprValueFactory): StoredProcedure { +class CalculateCrewMoonWeight(private val valueFactory: ExprValueFactory) : StoredProcedure { private val MOON_GRAVITATIONAL_CONSTANT = BigDecimal(1.622 / 9.81) // [StoredProcedureSignature] takes two arguments: @@ -55,10 +55,12 @@ class CalculateCrewMoonWeight(private val valueFactory: ExprValueFactory): Store it[Property.ACTUAL_ARGUMENT_TYPES] = crewName.type.name it[Property.FUNCTION_NAME] = signature.name } - throw EvaluationException("First argument to ${signature.name} was not a string", + throw EvaluationException( + "First argument to ${signature.name} was not a string", ErrorCode.EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_PROCEDURE_CALL, errorContext, - internal = false) + internal = false + ) } // Next we check if the given `crewName` is in the [EvaluationSession]'s global bindings. If not, we return 0. 
@@ -99,12 +101,18 @@ class CustomProceduresExample(out: PrintStream) : Example(out) { val initialCrews = Bindings.ofMap( mapOf( "crew1" to pipeline.valueFactory.newFromIonValue( - ion.singleValue("""[ { name: "Neil", mass: 80.5 }, + ion.singleValue( + """[ { name: "Neil", mass: 80.5 }, { name: "Buzz", mass: 72.3 }, - { name: "Michael", mass: 89.9 } ]""")), + { name: "Michael", mass: 89.9 } ]""" + ) + ), "crew2" to pipeline.valueFactory.newFromIonValue( - ion.singleValue("""[ { name: "James", mass: 77.1 }, - { name: "Spock", mass: 81.6 } ]""")) + ion.singleValue( + """[ { name: "James", mass: 77.1 }, + { name: "Spock", mass: 81.6 } ]""" + ) + ) ) ) val session = EvaluationSession.build { globals(initialCrews) } diff --git a/examples/src/kotlin/org/partiql/examples/EvaluationWithBindings.kt b/examples/src/kotlin/org/partiql/examples/EvaluationWithBindings.kt index cee7a39b2c..0c33d207df 100644 --- a/examples/src/kotlin/org/partiql/examples/EvaluationWithBindings.kt +++ b/examples/src/kotlin/org/partiql/examples/EvaluationWithBindings.kt @@ -34,4 +34,4 @@ class EvaluationWithBindings(out: PrintStream) : Example(out) { val result = e.eval(session) print("result", result.toString()) } -} \ No newline at end of file +} diff --git a/examples/src/kotlin/org/partiql/examples/PartialEvaluationVisitorTransform.kt b/examples/src/kotlin/org/partiql/examples/PartialEvaluationVisitorTransform.kt index 257308260e..d1133b5655 100644 --- a/examples/src/kotlin/org/partiql/examples/PartialEvaluationVisitorTransform.kt +++ b/examples/src/kotlin/org/partiql/examples/PartialEvaluationVisitorTransform.kt @@ -11,8 +11,6 @@ import org.partiql.lang.eval.EvaluationSession import org.partiql.lang.syntax.SqlParser import java.io.PrintStream - - /** * A simple AST visitor transform that performs partial evaluation--i.e.: evaluates all sub-expressions containing only * literal operands and replaces them with the result. 
For example, the query `1 + 2 * 3` would be transformed to @@ -69,7 +67,7 @@ private class PartialEvaluationVisitorTransform(val ion: IonSystem, val compileO return when { transformedOps.all { it is PartiqlAst.Expr.Lit } -> { - val e = pipeline.compile(PartiqlAst.build { query(transformedNAry) } ) + val e = pipeline.compile(PartiqlAst.build { query(transformedNAry) }) val partiallyEvaluatedResult = e.eval(session) PartiqlAst.build { lit(partiallyEvaluatedResult.ionValue.toIonElement(), metas) } } diff --git a/examples/src/kotlin/org/partiql/examples/SimpleExpressionEvaluation.kt b/examples/src/kotlin/org/partiql/examples/SimpleExpressionEvaluation.kt index 2b58131eee..4616cbe68a 100644 --- a/examples/src/kotlin/org/partiql/examples/SimpleExpressionEvaluation.kt +++ b/examples/src/kotlin/org/partiql/examples/SimpleExpressionEvaluation.kt @@ -24,4 +24,4 @@ class SimpleExpressionEvaluation(out: PrintStream) : Example(out) { val result = e.eval(session) print("result", result.toString()) } -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/BaseExampleTest.kt b/examples/test/org/partiql/examples/BaseExampleTest.kt index c52f5003f5..cfa7ce0445 100644 --- a/examples/test/org/partiql/examples/BaseExampleTest.kt +++ b/examples/test/org/partiql/examples/BaseExampleTest.kt @@ -18,4 +18,4 @@ abstract class BaseExampleTest { Assert.assertEquals(expected, outBuffer.toString("UTF-8")) } -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/CSVJavaExampleTest.kt b/examples/test/org/partiql/examples/CSVJavaExampleTest.kt index 5c6d52fd52..de93dc3c25 100644 --- a/examples/test/org/partiql/examples/CSVJavaExampleTest.kt +++ b/examples/test/org/partiql/examples/CSVJavaExampleTest.kt @@ -1,7 +1,6 @@ -package org.partiql.examples; +package org.partiql.examples import org.partiql.examples.util.Example - import java.io.PrintStream class CSVJavaExampleTest : BaseExampleTest() { @@ -30,4 +29,4 @@ class CSVJavaExampleTest : BaseExampleTest() { | >> | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/CsvExprValueExampleTest.kt b/examples/test/org/partiql/examples/CsvExprValueExampleTest.kt index fa980c264a..feae4fe981 100644 --- a/examples/test/org/partiql/examples/CsvExprValueExampleTest.kt +++ b/examples/test/org/partiql/examples/CsvExprValueExampleTest.kt @@ -33,4 +33,4 @@ class CsvExprValueExampleTest : BaseExampleTest() { | >> | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/CustomFunctionsExampleTest.kt b/examples/test/org/partiql/examples/CustomFunctionsExampleTest.kt index d95f120de0..3612a1e183 100644 --- a/examples/test/org/partiql/examples/CustomFunctionsExampleTest.kt +++ b/examples/test/org/partiql/examples/CustomFunctionsExampleTest.kt @@ -197,4 +197,4 @@ class CustomFunctionsExampleTest : BaseExampleTest() { | ] | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/EvaluationWithBindingsTest.kt b/examples/test/org/partiql/examples/EvaluationWithBindingsTest.kt index 802aef6893..36aae44282 100644 --- a/examples/test/org/partiql/examples/EvaluationWithBindingsTest.kt +++ b/examples/test/org/partiql/examples/EvaluationWithBindingsTest.kt @@ -15,4 +15,4 @@ class EvaluationWithBindingsTest : BaseExampleTest() { | 'Hello, Homer Simpson' | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/EvaluationWithLazyBindingsTest.kt 
b/examples/test/org/partiql/examples/EvaluationWithLazyBindingsTest.kt index 1c6f25801e..ad6411985b 100644 --- a/examples/test/org/partiql/examples/EvaluationWithLazyBindingsTest.kt +++ b/examples/test/org/partiql/examples/EvaluationWithLazyBindingsTest.kt @@ -19,4 +19,4 @@ class EvaluationWithLazyBindingsTest : BaseExampleTest() { | >> | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/ParserErrorExampleTest.kt b/examples/test/org/partiql/examples/ParserErrorExampleTest.kt index 027ed676a5..660feeedf4 100644 --- a/examples/test/org/partiql/examples/ParserErrorExampleTest.kt +++ b/examples/test/org/partiql/examples/ParserErrorExampleTest.kt @@ -20,4 +20,4 @@ class ParserErrorExampleTest : BaseExampleTest() { | '+' | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/PreventJoinVisitorExampleTest.kt b/examples/test/org/partiql/examples/PreventJoinVisitorExampleTest.kt index 796173d884..5f9326a32d 100644 --- a/examples/test/org/partiql/examples/PreventJoinVisitorExampleTest.kt +++ b/examples/test/org/partiql/examples/PreventJoinVisitorExampleTest.kt @@ -17,4 +17,4 @@ class PreventJoinVisitorExampleTest : BaseExampleTest() { | true | """.trimMargin() -} \ No newline at end of file +} diff --git a/examples/test/org/partiql/examples/SimpleExpressionEvaluationTest.kt b/examples/test/org/partiql/examples/SimpleExpressionEvaluationTest.kt index d9407269b5..91ea111d6a 100644 --- a/examples/test/org/partiql/examples/SimpleExpressionEvaluationTest.kt +++ b/examples/test/org/partiql/examples/SimpleExpressionEvaluationTest.kt @@ -13,4 +13,4 @@ class SimpleExpressionEvaluationTest : BaseExampleTest() { | 2 | """.trimMargin() -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/CompilerPipeline.kt b/lang/src/org/partiql/lang/CompilerPipeline.kt index 48d7087fea..98c6a2325f 100644 --- a/lang/src/org/partiql/lang/CompilerPipeline.kt +++ b/lang/src/org/partiql/lang/CompilerPipeline.kt @@ -74,7 +74,7 @@ typealias ProcessingStep = (PartiqlAst.Statement, StepContext) -> PartiqlAst.Sta * used to compile queries concurrently. If used in a multithreaded application, use one instance of [CompilerPipeline] * per thread. */ -interface CompilerPipeline { +interface CompilerPipeline { val valueFactory: ExprValueFactory /** The compilation options. 
*/ @@ -203,7 +203,7 @@ interface CompilerPipeline { fun build(): CompilerPipeline { val compileOptionsToUse = compileOptions ?: CompileOptions.standard() - when(compileOptionsToUse.thunkReturnTypeAssertions) { + when (compileOptionsToUse.thunkReturnTypeAssertions) { ThunkReturnTypeAssertions.DISABLED -> { /* intentionally blank */ } ThunkReturnTypeAssertions.ENABLED -> { check(this.globalTypeBindings != null) { @@ -254,7 +254,8 @@ internal class CompilerPipelineImpl( } }.flatten().toMap(), procedures, - compileOptions) + compileOptions + ) override fun compile(query: String): Expression = compile(parser.parseAstStatement(query)) @@ -277,15 +278,17 @@ internal class CompilerPipelineImpl( StaticTypeInferenceVisitorTransform( globalBindings = globalTypeBindings, customFunctionSignatures = functions.values.map { it.signature }, - customTypedOpParameters = customDataTypes.map { customType -> + customTypedOpParameters = customDataTypes.map { customType -> (customType.aliases + customType.name).map { alias -> Pair(alias.toLowerCase(), customType.typedOpParameter) } }.flatten().toMap() - )) + ) + ) } } - ).flatten().toTypedArray()) + ).flatten().toTypedArray() + ) val queryToCompile = transforms.transformStatement(preProcessedQuery) @@ -293,5 +296,5 @@ internal class CompilerPipelineImpl( } internal fun executePreProcessingSteps(query: PartiqlAst.Statement, context: StepContext) = preProcessingSteps - .interruptibleFold(query) { currentAstStatement, step -> step(currentAstStatement, context) } + .interruptibleFold(query) { currentAstStatement, step -> step(currentAstStatement, context) } } diff --git a/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt b/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt index 35f473f046..a2f61d5fc9 100644 --- a/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt +++ b/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt @@ -20,7 +20,7 @@ import org.partiql.lang.domains.PartiqlAst /** * Contains references to each of the aggregate call-sites in a given [Select]. 
*/ -data class AggregateCallSiteListMeta(val aggregateCallSites: List): Meta { +data class AggregateCallSiteListMeta(val aggregateCallSites: List) : Meta { override val tag = TAG override val shouldSerialize: Boolean @@ -32,6 +32,6 @@ data class AggregateCallSiteListMeta(val aggregateCallSites: List, arityFrom: Int, arityTo: Int = arityFrom) - : this(text, versions.map { Pair(it, SexpValidationRules(arityFrom, arityTo)) }.toMap()) + constructor(text: String, versions: Array, arityFrom: Int, arityTo: Int = arityFrom) : + this(text, versions.map { Pair(it, SexpValidationRules(arityFrom, arityTo)) }.toMap()) } /** @@ -162,11 +162,11 @@ private enum class NodeTag(val definition: TagDefinition) { NARY_CALL(TagDefinition("call", 1, Int.MAX_VALUE)), NARY_STRING_CONCAT(TagDefinition("||", AstVersion.V0, 1, Int.MAX_VALUE)), NARY_UNION(TagDefinition("union", 2, Int.MAX_VALUE)), - NARY_UNION_ALL(TagDefinition("union_all", AstVersion.V0,2, Int.MAX_VALUE)), + NARY_UNION_ALL(TagDefinition("union_all", AstVersion.V0, 2, Int.MAX_VALUE)), NARY_EXCEPT(TagDefinition("except", 2, Int.MAX_VALUE)), - NARY_EXCEPT_ALL(TagDefinition("except_all", AstVersion.V0,2, Int.MAX_VALUE)), + NARY_EXCEPT_ALL(TagDefinition("except_all", AstVersion.V0, 2, Int.MAX_VALUE)), NARY_INTERSECT(TagDefinition("intersect", 2, Int.MAX_VALUE)), - NARY_INTERSECT_ALL(TagDefinition("intersect_all", AstVersion.V0,2, Int.MAX_VALUE)), + NARY_INTERSECT_ALL(TagDefinition("intersect_all", AstVersion.V0, 2, Int.MAX_VALUE)), TYPED_IS(TagDefinition("is", AstVersion.V0, 2)), TYPED_IS_NOT(TagDefinition("is_not", AstVersion.V0, 2)), @@ -194,7 +194,7 @@ private enum class NodeTag(val definition: TagDefinition) { SET(TagDefinition("set", 1, Int.MAX_VALUE)), REMOVE(TagDefinition("remove", 1, 1)), DELETE(TagDefinition("delete", 0, 0)), - ASSIGNMENT(TagDefinition("assignment", 2 , 2)), + ASSIGNMENT(TagDefinition("assignment", 2, 2)), // Only valid within (select ...) PROJECT(TagDefinition("project", 1)), @@ -239,7 +239,7 @@ private enum class NodeTag(val definition: TagDefinition) { // Only valided in a typed expression i.e. the second argument of (is ...) or (cast ...)) TYPE(TagDefinition("type", AstVersion.V0, 1, 3)), - //Only valid as path components... + // Only valid as path components... CASE_INSENSITIVE(TagDefinition("case_insensitive", 0, 1)), CASE_SENSITIVE(TagDefinition("case_sensitive", 0, 1)), @@ -263,7 +263,8 @@ class AstDeserializerBuilder(val ion: IonSystem, val customTypes: List deserializeIonValueMetaOrTerm(targetValue: IonValue, deserializeNode: (IonValue, MetaContainer) -> T): T = when (targetValue) { // If it's not an sexp, it can't be a meta node. - //extract meta + // extract meta !is IonSexp -> deserializeNode(targetValue, emptyMetaContainer) else -> when (targetValue.tagText) { // Expression has metas -- extract source location information and pass that to [block]. "meta" -> { - //extract meta + // extract meta val struct = targetValue.args[1].asIonStruct() val lineNum = struct.field("line").longValue() val charOffset = struct.field("column").longValue() @@ -351,14 +352,13 @@ internal class AstDeserializerInternal( val expSexp = targetValue.args[0] deserializeNode(expSexp, metaContainerOf(locationMeta)) } - //Expression not wrapped in `meta` and therefore has no metas, pass empty MetaContainer to [deserializeNode]. + // Expression not wrapped in `meta` and therefore has no metas, pass empty MetaContainer to [deserializeNode]. 
else -> { deserializeNode(targetValue, emptyMetaContainer) } } } - private fun List.deserializeAllExprNodes(): List = map { deserializeExprNode(it.asIonSexp()) } /** @@ -368,65 +368,65 @@ internal class AstDeserializerInternal( checkThreadInterrupted() return deserializeSexpMetaOrTerm(metaOrTermOrExp) { target, metas -> val nodeTag = target.nodeTag - val targetArgs = target.args //args is an extension property--call it once for efficiency - //.toList() forces immutability + val targetArgs = target.args // args is an extension property--call it once for efficiency + // .toList() forces immutability when (nodeTag) { - NodeTag.LIT -> deserializeLit(targetArgs, metas) - NodeTag.MISSING -> deserializeMissing(metas) - NodeTag.ID -> deserializeId(targetArgs, metas) - NodeTag.SCOPE_QUALIFIER -> deserializeScopeQualifier(targetArgs, metas) - NodeTag.SELECT -> deserializeSelect(target, metas) - NodeTag.PIVOT -> deserializeSelect(target, metas) - NodeTag.DATA_MANIPULATION -> deserializeDataManipulation(target, metas) - NodeTag.PATH -> deserializePath(target) - NodeTag.CALL_AGG -> deserializeCallAgg(targetArgs, metas) - NodeTag.CALL_AGG_WILDCARD -> deserializeCallAggWildcard(targetArgs, metas) - NodeTag.STRUCT -> deserializeStruct(targetArgs, metas) - NodeTag.PARAMETER -> Parameter(target[1].asIonInt().intValue(), metas) + NodeTag.LIT -> deserializeLit(targetArgs, metas) + NodeTag.MISSING -> deserializeMissing(metas) + NodeTag.ID -> deserializeId(targetArgs, metas) + NodeTag.SCOPE_QUALIFIER -> deserializeScopeQualifier(targetArgs, metas) + NodeTag.SELECT -> deserializeSelect(target, metas) + NodeTag.PIVOT -> deserializeSelect(target, metas) + NodeTag.DATA_MANIPULATION -> deserializeDataManipulation(target, metas) + NodeTag.PATH -> deserializePath(target) + NodeTag.CALL_AGG -> deserializeCallAgg(targetArgs, metas) + NodeTag.CALL_AGG_WILDCARD -> deserializeCallAggWildcard(targetArgs, metas) + NodeTag.STRUCT -> deserializeStruct(targetArgs, metas) + NodeTag.PARAMETER -> Parameter(target[1].asIonInt().intValue(), metas) NodeTag.LIST, NodeTag.BAG, NodeTag.SEXP -> deserializeSeq(nodeTag, targetArgs, metas) - NodeTag.SIMPLE_CASE -> deserializeSimpleCase(target, metas) - NodeTag.SEARCHED_CASE -> deserializeSearchedCase(target, metas) - NodeTag.NARY_NOT -> deserializeNAryNot(targetArgs, metas) - NodeTag.NARY_ADD -> deserializeNAryAdd(targetArgs, metas) - NodeTag.NARY_SUB -> deserializeNArySub(targetArgs, metas) - NodeTag.NARY_MUL -> deserializeNAryMul(targetArgs, metas) - NodeTag.NARY_DIV -> deserializeNAryDiv(targetArgs, metas) - NodeTag.NARY_MOD -> deserializeNAryMod(targetArgs, metas) - NodeTag.NARY_GT -> deserializeNAryGt(targetArgs, metas) - NodeTag.NARY_GTE -> deserializeNAryGte(targetArgs, metas) - NodeTag.NARY_LT -> deserializeNAryLt(targetArgs, metas) - NodeTag.NARY_LTE -> deserializeNAryLte(targetArgs, metas) - NodeTag.NARY_EQ -> deserializeNAryEq(targetArgs, metas) - NodeTag.NARY_NE -> deserializeNAryNe(targetArgs, metas) - NodeTag.NARY_IN -> deserializeNAryIn(targetArgs, metas) - NodeTag.NARY_NOT_IN -> deserializeNAryNotIn(targetArgs, metas) - NodeTag.NARY_AND -> deserializeNAryAnd(targetArgs, metas) - NodeTag.NARY_OR -> deserializeNAryOr(targetArgs, metas) - NodeTag.NARY_LIKE -> deserializeNAryLike(targetArgs, metas) - NodeTag.NARY_NOT_LIKE -> deserializeNAryNotLlike(targetArgs, metas) - NodeTag.NARY_BETWEEN -> deserializeNAryBetween(targetArgs, metas) - NodeTag.NARY_NOT_BETWEEN -> deserializeNAryNotBetween(targetArgs, metas) + NodeTag.SIMPLE_CASE -> deserializeSimpleCase(target, metas) + 
NodeTag.SEARCHED_CASE -> deserializeSearchedCase(target, metas) + NodeTag.NARY_NOT -> deserializeNAryNot(targetArgs, metas) + NodeTag.NARY_ADD -> deserializeNAryAdd(targetArgs, metas) + NodeTag.NARY_SUB -> deserializeNArySub(targetArgs, metas) + NodeTag.NARY_MUL -> deserializeNAryMul(targetArgs, metas) + NodeTag.NARY_DIV -> deserializeNAryDiv(targetArgs, metas) + NodeTag.NARY_MOD -> deserializeNAryMod(targetArgs, metas) + NodeTag.NARY_GT -> deserializeNAryGt(targetArgs, metas) + NodeTag.NARY_GTE -> deserializeNAryGte(targetArgs, metas) + NodeTag.NARY_LT -> deserializeNAryLt(targetArgs, metas) + NodeTag.NARY_LTE -> deserializeNAryLte(targetArgs, metas) + NodeTag.NARY_EQ -> deserializeNAryEq(targetArgs, metas) + NodeTag.NARY_NE -> deserializeNAryNe(targetArgs, metas) + NodeTag.NARY_IN -> deserializeNAryIn(targetArgs, metas) + NodeTag.NARY_NOT_IN -> deserializeNAryNotIn(targetArgs, metas) + NodeTag.NARY_AND -> deserializeNAryAnd(targetArgs, metas) + NodeTag.NARY_OR -> deserializeNAryOr(targetArgs, metas) + NodeTag.NARY_LIKE -> deserializeNAryLike(targetArgs, metas) + NodeTag.NARY_NOT_LIKE -> deserializeNAryNotLlike(targetArgs, metas) + NodeTag.NARY_BETWEEN -> deserializeNAryBetween(targetArgs, metas) + NodeTag.NARY_NOT_BETWEEN -> deserializeNAryNotBetween(targetArgs, metas) NodeTag.NARY_STRING_CONCAT -> deserializeNAryStringConcat(targetArgs, metas) - NodeTag.NARY_CALL -> deserializeNAryCall(targetArgs, metas) - NodeTag.NARY_UNION -> deserializeNAryUnion(targetArgs, metas) - NodeTag.NARY_UNION_ALL -> deserializeNAryUnionAll(targetArgs, metas) - NodeTag.NARY_EXCEPT -> deserializeNAryExcept(targetArgs, metas) - NodeTag.NARY_EXCEPT_ALL -> deserializeNAryExceptAll(targetArgs, metas) - NodeTag.NARY_INTERSECT -> deserializeNAryIntersect(targetArgs, metas) + NodeTag.NARY_CALL -> deserializeNAryCall(targetArgs, metas) + NodeTag.NARY_UNION -> deserializeNAryUnion(targetArgs, metas) + NodeTag.NARY_UNION_ALL -> deserializeNAryUnionAll(targetArgs, metas) + NodeTag.NARY_EXCEPT -> deserializeNAryExcept(targetArgs, metas) + NodeTag.NARY_EXCEPT_ALL -> deserializeNAryExceptAll(targetArgs, metas) + NodeTag.NARY_INTERSECT -> deserializeNAryIntersect(targetArgs, metas) NodeTag.NARY_INTERSECT_ALL -> deserializeNAryIntersectAll(targetArgs, metas) - NodeTag.TYPED_IS -> deserializeTypedIs(targetArgs, metas) - NodeTag.TYPED_IS_NOT -> deserializeTypedIsNot(targetArgs, metas) - NodeTag.TYPED_CAST -> deserializeTypedCast(targetArgs, metas) - NodeTag.NULLIF -> deserializeNullIf(targetArgs, metas) - NodeTag.COALESCE -> deserializeCoalesce(targetArgs, metas) - NodeTag.CREATE -> deserializeCreateV0(targetArgs, metas) - NodeTag.DROP_INDEX -> deserializeDropIndexV0(targetArgs, metas) - NodeTag.DROP_TABLE -> deserializeDropTableV0(targetArgs, metas) + NodeTag.TYPED_IS -> deserializeTypedIs(targetArgs, metas) + NodeTag.TYPED_IS_NOT -> deserializeTypedIsNot(targetArgs, metas) + NodeTag.TYPED_CAST -> deserializeTypedCast(targetArgs, metas) + NodeTag.NULLIF -> deserializeNullIf(targetArgs, metas) + NodeTag.COALESCE -> deserializeCoalesce(targetArgs, metas) + NodeTag.CREATE -> deserializeCreateV0(targetArgs, metas) + NodeTag.DROP_INDEX -> deserializeDropIndexV0(targetArgs, metas) + NodeTag.DROP_TABLE -> deserializeDropTableV0(targetArgs, metas) // These are handled elsewhere NodeTag.META, - // These can't be directly deserialized to ExprNode instances. + // These can't be directly deserialized to ExprNode instances. 
NodeTag.INDEX, NodeTag.TABLE, NodeTag.KEYS, @@ -479,8 +479,8 @@ internal class AstDeserializerInternal( targetArgs[0].asIonSymbol().stringValue(), CaseSensitivity.fromSymbol(targetArgs[1].asIonSymbol().stringValue()), ScopeQualifier.UNQUALIFIED, - metas) - + metas + ) private fun deserializeScopeQualifier(targetArgs: List, metas: MetaContainer): VariableReference { val qualifiedSexp = targetArgs[0].asIonSexp() @@ -488,7 +488,8 @@ internal class AstDeserializerInternal( qualifiedSexp.args[0].asIonSymbol().stringValue()!!, CaseSensitivity.fromSymbol(qualifiedSexp.args[1].asIonSymbol().stringValue()), ScopeQualifier.LEXICAL, - metas) + metas + ) } private fun deserializeCallAgg(targetArgs: List, metas: MetaContainer) = @@ -496,10 +497,11 @@ internal class AstDeserializerInternal( VariableReference( targetArgs[0].asIonSymbol().stringValue(), CaseSensitivity.INSENSITIVE, - ScopeQualifier.UNQUALIFIED, emptyMetaContainer), + ScopeQualifier.UNQUALIFIED, emptyMetaContainer + ), SetQuantifier.valueOf(targetArgs[1].asIonSymbol().toString().toUpperCase()), - deserializeExprNode(targetArgs[2].asIonSexp()), metas) - + deserializeExprNode(targetArgs[2].asIonSexp()), metas + ) private fun deserializeCallAggWildcard(targetArgs: List, metas: MetaContainer): CallAgg { if (targetArgs[0].asIonSymbol().stringValue() != "count") { @@ -526,7 +528,9 @@ internal class AstDeserializerInternal( pairs.add( StructField( deserializeExprNode(targetArgs[keyIndex].asIonSexp()), - deserializeExprNode(targetArgs[keyIndex + 1].asIonSexp()))) + deserializeExprNode(targetArgs[keyIndex + 1].asIonSexp()) + ) + ) } return Struct(pairs.toList(), metas) } @@ -616,7 +620,8 @@ internal class AstDeserializerInternal( ): NAry = NAry( NAryOp.NOT, listOf(NAry(NAryOp.IN, targetArgs.deserializeAllExprNodes(), metas)), - metas + metaContainerOf(LegacyLogicalNotMeta.instance)) + metas + metaContainerOf(LegacyLogicalNotMeta.instance) + ) private fun deserializeNAryIn( targetArgs: List, @@ -629,7 +634,8 @@ internal class AstDeserializerInternal( ): NAry = NAry( NAryOp.NOT, listOf(NAry(NAryOp.LIKE, targetArgs.deserializeAllExprNodes(), metas)), - metas + metaContainerOf(LegacyLogicalNotMeta.instance)) + metas + metaContainerOf(LegacyLogicalNotMeta.instance) + ) private fun deserializeNAryLike( targetArgs: List, @@ -644,7 +650,8 @@ internal class AstDeserializerInternal( ): NAry = NAry( NAryOp.NOT, listOf(NAry(NAryOp.BETWEEN, targetArgs.deserializeAllExprNodes(), metas)), - metas + metaContainerOf(LegacyLogicalNotMeta.instance)) + metas + metaContainerOf(LegacyLogicalNotMeta.instance) + ) private fun deserializeNAryBetween( targetArgs: List, @@ -661,13 +668,14 @@ internal class AstDeserializerInternal( targetArgs[0].asIonSymbol().stringValue(), CaseSensitivity.INSENSITIVE, ScopeQualifier.UNQUALIFIED, - emptyMetaContainer) + emptyMetaContainer + ) val argExprNodes = targetArgs.drop(1).deserializeAllExprNodes() return when (functionReference.id) { - "null_if" -> deserializeNullIf(targetArgs.drop(1), metas) + "null_if" -> deserializeNullIf(targetArgs.drop(1), metas) "coalesce" -> deserializeCoalesce(targetArgs.drop(1), metas) - else -> NAry(NAryOp.CALL, listOf(functionReference) + argExprNodes, metas) + else -> NAry(NAryOp.CALL, listOf(functionReference) + argExprNodes, metas) } } @@ -714,7 +722,7 @@ internal class AstDeserializerInternal( val id = targetArgs[0].stringValue() val target = targetArgs[1].asIonSexp() val args = target.args - return when(target.nodeTag) { + return when (target.nodeTag) { NodeTag.TABLE -> { val tableName = id ?: 
err("Table name must be specified") CreateTable(tableName, metas) @@ -762,8 +770,8 @@ internal class AstDeserializerInternal( TypedOp.IS, deserializeExprNode(targetArgs[0].asIonSexp()), deserializeDataType(targetArgs[1]), - metas) - + metas + ) private fun deserializeTypedIsNot( targetArgs: List, @@ -775,8 +783,11 @@ internal class AstDeserializerInternal( TypedOp.IS, deserializeExprNode(targetArgs[0].asIonSexp()), deserializeDataType(targetArgs[1]), - metas)), - metas + metaContainerOf(LegacyLogicalNotMeta.instance)) + metas + ) + ), + metas + metaContainerOf(LegacyLogicalNotMeta.instance) + ) private fun deserializeTypedCast( targetArgs: List, @@ -785,7 +796,8 @@ internal class AstDeserializerInternal( TypedOp.CAST, deserializeExprNode(targetArgs[0].asIonSexp()), deserializeDataType(targetArgs[1]), - metas) + metas + ) private fun deserializeNullIf(targetArgs: List, metas: MetaContainer): ExprNode { val args = targetArgs.map { it.asIonSexp() } @@ -806,7 +818,7 @@ internal class AstDeserializerInternal( private fun deserializeDataManipulation(target: IonSexp, metas: MetaContainer): ExprNode { val args = target.args.toListOfIonSexp() - val dmlOp = when(args[0].nodeTag) { + val dmlOp = when (args[0].nodeTag) { else -> deserializeDataManipulationOperation(args[0]) } val children = args.drop(1).toListOfIonSexp().map { Pair(it.nodeTag, it) }.toMap() @@ -824,7 +836,8 @@ internal class AstDeserializerInternal( from = from, where = where, returning = null, // V0 does not support the RETURNING clause. - metas = metas) + metas = metas + ) } private fun deserializeDataManipulationOperation(target: IonSexp): List { @@ -835,12 +848,14 @@ internal class AstDeserializerInternal( } NodeTag.INSERT_VALUE -> { val args = target.args - listOf(InsertValueOp( - lvalue = deserializeExprNode(args[0].asIonSexp()), - value = deserializeExprNode(args[1].asIonSexp()), - position = args.getOrNull(2)?.let { deserializeExprNode(it.asIonSexp()) }, - onConflict = null // V0 does not support the ON CONFLICT clause - )) + listOf( + InsertValueOp( + lvalue = deserializeExprNode(args[0].asIonSexp()), + value = deserializeExprNode(args[1].asIonSexp()), + position = args.getOrNull(2)?.let { deserializeExprNode(it.asIonSexp()) }, + onConflict = null // V0 does not support the ON CONFLICT clause + ) + ) } NodeTag.SET -> deserializeSetAssignments(target.args.toListOfIonSexp()).map { AssignmentOp(it) } @@ -903,7 +918,8 @@ internal class AstDeserializerInternal( groupBy = groupBy, having = havingExprNode, limit = limitExprNode, - metas = metas) + metas = metas + ) } private fun deserializeLet(let: IonSexp): LetSource { @@ -930,13 +946,14 @@ internal class AstDeserializerInternal( deserializeSexpMetaOrTerm(nameArg.asIonSexp()) { target, metas -> SymbolicName( target.args[0].asIonSymbol().stringValue(), - metas) + metas + ) } } val groupingSrategy = when (groupBy.nodeTag) { NodeTag.GROUP -> GroupingStrategy.FULL - else -> GroupingStrategy.PARTIAL + else -> GroupingStrategy.PARTIAL } return GroupBy(groupingSrategy, items, nameSymbol) @@ -955,7 +972,6 @@ internal class AstDeserializerInternal( private fun deserializeSelectValueOrListV0(project: IonSexp): SelectProjection { val projectChild = project[1].asIonSexp() - return when (projectChild.nodeTag) { NodeTag.VALUE -> SelectProjectionValue(deserializeExprNode(projectChild.args.first().asIonSexp()), emptyMetaContainer) NodeTag.LIST -> deserializeSelectListItems(projectChild) @@ -967,25 +983,26 @@ internal class AstDeserializerInternal( val selectListItems = projectChild.args.map { 
selectListItemSexp -> deserializeSexpMetaOrTerm(selectListItemSexp.asIonSexp()) { itemTarget, metas -> when (itemTarget.nodeTag) { - NodeTag.AS -> { + NodeTag.AS -> { val asName = SymbolicName( itemTarget.args[0].asIonSymbol().stringValue(), - metas) + metas + ) SelectListItemExpr(deserializeExprNode(itemTarget.args[1].asIonSexp()), asName) } - NodeTag.PROJECT_ALL -> { + NodeTag.PROJECT_ALL -> { if (itemTarget.arity == 0) { SelectListItemStar(metas) - } - else { + } else { SelectListItemProjectAll( // Note: metas is always empty in this case because the metas are on the child of // PROJECT_ALL. This means we should not call .copy() as in other cases.) - deserializeExprNode(itemTarget.args[0].asIonSexp())) + deserializeExprNode(itemTarget.args[0].asIonSexp()) + ) } } - else -> SelectListItemExpr(deserializeExprNode(selectListItemSexp.asIonSexp())) + else -> SelectListItemExpr(deserializeExprNode(selectListItemSexp.asIonSexp())) } } } @@ -1007,19 +1024,22 @@ internal class AstDeserializerInternal( if (variables.atName != null) error("'at' previously encountered in this from source") deserializeFromSourceV0( target[2].asIonSexp(), - variables.copy(atName = SymbolicName(target.args[0].asIonSymbol().stringValue(), metas))) + variables.copy(atName = SymbolicName(target.args[0].asIonSymbol().stringValue(), metas)) + ) } NodeTag.AS -> { if (variables.asName != null) error("'as' previously encountered in this from source") deserializeFromSourceV0( target[2].asIonSexp(), - variables.copy(asName = SymbolicName(target.args[0].asIonSymbol().stringValue(), metas))) + variables.copy(asName = SymbolicName(target.args[0].asIonSymbol().stringValue(), metas)) + ) } NodeTag.BY -> { if (variables.byName != null) error("'by' previously encountered in this from source") deserializeFromSourceV0( target[2].asIonSexp(), - variables.copy(byName = SymbolicName(target.args[0].asIonSymbol().stringValue(), metas))) + variables.copy(byName = SymbolicName(target.args[0].asIonSymbol().stringValue(), metas)) + ) } NodeTag.UNPIVOT -> { deserializeFromSourceUnpivotV0(targetArgs, variables, metas) @@ -1054,11 +1074,12 @@ internal class AstDeserializerInternal( ): FromSourceJoin { val joinOp = when (target.nodeTag) { NodeTag.INNER_JOIN -> JoinOp.INNER - NodeTag.LEFT_JOIN -> JoinOp.LEFT + NodeTag.LEFT_JOIN -> JoinOp.LEFT NodeTag.RIGHT_JOIN -> JoinOp.RIGHT NodeTag.OUTER_JOIN -> JoinOp.OUTER - else -> throw IllegalStateException( - "Illegal join operator: ${target.nodeTag.definition.tagText}") + else -> throw IllegalStateException( + "Illegal join operator: ${target.nodeTag.definition.tagText}" + ) } val leftFromSource = deserializeFromSourceV0(targetArgs[0].asIonSexp()) @@ -1066,9 +1087,10 @@ internal class AstDeserializerInternal( val (condition, metasMaybeWithImplicitJoin) = when { target.arity > 2 -> Pair(deserializeExprNode(targetArgs[2].asIonSexp()), metas) - else -> Pair( + else -> Pair( Literal(ion.newBool(true), emptyMetaContainer), - metas + metaContainerOf(IsImplictJoinMeta.instance)) + metas + metaContainerOf(IsImplictJoinMeta.instance) + ) } return FromSourceJoin( @@ -1076,7 +1098,8 @@ internal class AstDeserializerInternal( leftFromSource, rightFromSource, condition, - metasMaybeWithImplicitJoin) + metasMaybeWithImplicitJoin + ) } private fun deserializeFromSourceExprV0( @@ -1086,7 +1109,8 @@ internal class AstDeserializerInternal( ): FromSourceExpr { return FromSourceExpr( expr = deserializeExprNode(target).copy(metas), - variables = variables) + variables = variables + ) } private fun deserializeGroupByItem(target: 
IonSexp): GroupByItem = @@ -1096,12 +1120,13 @@ internal class AstDeserializerInternal( NodeTag.AS -> { val symbolicName = SymbolicName( innerTargetArgs[0].asIonSymbol().stringValue(), - metas) + metas + ) val expr = deserializeExprNode(innerTargetArgs[1].asIonSexp()) GroupByItem(expr, symbolicName) } - else -> { + else -> { val expr = deserializeExprNode(innerTarget).copy(metas) GroupByItem(expr, null) } @@ -1173,7 +1198,6 @@ internal class AstDeserializerInternal( Path(root, pathComponents, metas) } - private fun deserializePathComponents(componentSexps: List): List = componentSexps.map { componentSexp -> val (targetComponent, caseSensitivity) = when (componentSexp.nodeTag) { @@ -1197,7 +1221,8 @@ internal class AstDeserializerInternal( PathComponentUnpivot(metas) } else -> throw IllegalStateException( - "invalid arity for (star) or (*) (this should have been caught earlier)") + "invalid arity for (star) or (*) (this should have been caught earlier)" + ) } } else -> { @@ -1238,7 +1263,7 @@ internal class AstDeserializerInternal( } DataType(sqlDataType, args, metas) } - else -> { + else -> { err("Expected `${NodeTag.TYPE.definition.tagText}` tag instead found ${nodeTag.definition.tagText}") } } @@ -1252,7 +1277,7 @@ internal class AstDeserializerInternal( */ private fun IonSexp.singleWrappedChildWithTagOrNull(tagName: String): IonValue? = this.args.map { it.asIonSexp() }.singleOrNull { - val tagText = when(it.tagText) { + val tagText = when (it.tagText) { "meta" -> it.args[0].asIonSexp().tagText else -> it.tagText } @@ -1299,4 +1324,4 @@ private data class UnknownMeta(override val tag: String, val metaSexp: IonSexp) } } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/AstSerialization.kt b/lang/src/org/partiql/lang/ast/AstSerialization.kt index 35d294a887..d37ea96662 100644 --- a/lang/src/org/partiql/lang/ast/AstSerialization.kt +++ b/lang/src/org/partiql/lang/ast/AstSerialization.kt @@ -44,7 +44,7 @@ interface AstSerializer { } } -private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): AstSerializer { +private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem) : AstSerializer { override fun serialize(exprNode: ExprNode): IonSexp { val resultSexp = ion.newEmptySexp() val writer = ion.newWriter(resultSexp) @@ -56,7 +56,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writeAsTerm(metas: MetaContainer?, block: IonWriterContext.() -> Unit) { val sloc = metas?.find(SourceLocationMeta.TAG) as? 
SourceLocationMeta - if(sloc != null) { + if (sloc != null) { sexp { symbol("meta") block() @@ -65,8 +65,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): int("column", sloc.charOffset) } } - } - else { + } else { block() } } @@ -77,30 +76,30 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): sexp { when (expr) { // Leaf nodes - is Literal -> case { writeLiteral(expr) } - is LiteralMissing -> case { writeLiteralMissing(expr) } + is Literal -> case { writeLiteral(expr) } + is LiteralMissing -> case { writeLiteralMissing(expr) } is VariableReference -> case { writeVariableReference(expr) } - is NAry -> case { writeNAry(expr) } - is CallAgg -> case { writeCallAgg(expr) } - is Typed -> case { writeTyped(expr) } - is Path -> case { writePath(expr) } - is SimpleCase -> case { writeSimpleCase(expr) } - is SearchedCase -> case { writeSearchedCase(expr) } - is Struct -> case { writeStruct(expr) } - is Seq -> case { writeSeq(expr) } - is Select -> case { writeSelect(expr) } - is DataManipulation -> case { writeDataManipulation(expr) } - is CreateTable -> case { writeCreateTable(expr) } - is CreateIndex -> case { writeCreateIndex(expr) } - is DropTable -> case { writeDropTable(expr) } - is DropIndex -> case { writeDropIndex(expr) } - is Parameter -> case { writeParameter(expr) } - is NullIf -> case { writeNullIf(expr) } - is Coalesce -> case { writeCoalesce(expr) } - is Parameter -> case { writeParameter(expr)} + is NAry -> case { writeNAry(expr) } + is CallAgg -> case { writeCallAgg(expr) } + is Typed -> case { writeTyped(expr) } + is Path -> case { writePath(expr) } + is SimpleCase -> case { writeSimpleCase(expr) } + is SearchedCase -> case { writeSearchedCase(expr) } + is Struct -> case { writeStruct(expr) } + is Seq -> case { writeSeq(expr) } + is Select -> case { writeSelect(expr) } + is DataManipulation -> case { writeDataManipulation(expr) } + is CreateTable -> case { writeCreateTable(expr) } + is CreateIndex -> case { writeCreateIndex(expr) } + is DropTable -> case { writeDropTable(expr) } + is DropIndex -> case { writeDropIndex(expr) } + is Parameter -> case { writeParameter(expr) } + is NullIf -> case { writeNullIf(expr) } + is Coalesce -> case { writeCoalesce(expr) } + is Parameter -> case { writeParameter(expr) } is DateLiteral -> throw UnsupportedOperationException("DATE literals not supported by the V0 AST") is TimeLiteral -> throw UnsupportedOperationException("TIME literals not supported by the V0 AST") - is Exec -> throw UnsupportedOperationException("EXEC clause not supported by the V0 AST") + is Exec -> throw UnsupportedOperationException("EXEC clause not supported by the V0 AST") }.toUnit() } } @@ -119,7 +118,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writeVariableReference(expr: VariableReference) { val (id, sensitivity, lookup, _: MetaContainer) = expr - when(astVersion) { + when (astVersion) { AstVersion.V0 -> { fun writeVarRef() { symbol("id") @@ -131,13 +130,11 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): sexp { writeVarRef() } - } - else { + } else { writeVarRef() } } } - } private fun IonWriterContext.writeCallAgg(expr: CallAgg) { @@ -186,7 +183,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): fun IonWriterContext.writeDataType(dataType: DataType) { writeAsTerm(dataType.metas) { sexp { - when(astVersion) { + when (astVersion) { AstVersion.V0 -> { symbol("type") 
symbol(dataType.sqlDataType.typeName) @@ -210,7 +207,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writeSelect(expr: Select) { val (setQuantifier, projection, from, fromLet, where, groupBy, having, orderBy, limit, offset, _: MetaContainer) = expr - if (offset != null){ + if (offset != null) { throw UnsupportedOperationException("OFFSET clause is not supported by the V0 AST") } @@ -244,7 +241,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } groupBy?.let { - when(astVersion) { + when (astVersion) { AstVersion.V0 -> writeGroupByV0(groupBy) } } @@ -284,7 +281,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): sexp { symbol( when (grouping) { - GroupingStrategy.FULL -> "group" + GroupingStrategy.FULL -> "group" GroupingStrategy.PARTIAL -> "group_partial" } ) @@ -300,8 +297,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): writeExprNode(itemExpr) } } - } - else { + } else { writeExprNode(itemExpr) } } @@ -338,7 +334,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } } where?.let { - sexp{ + sexp { symbol("where") writeExprNode(it) } @@ -350,17 +346,18 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): // but it does *not* support mixing multiple DML operations together, i.e.: SET a = 1, REMOVE b is illegal // The following bit of code checks to make sure that [dmlOp] only contains items that can be represented in // the V0 AST. - val isSetOnly = if(!opList.ops.any { it !is AssignmentOp }) { - if(opList.ops.size != 1) { + val isSetOnly = if (!opList.ops.any { it !is AssignmentOp }) { + if (opList.ops.size != 1) { throw UnsupportedOperationException( - "A single DML statement with multiple operations other than SET cannot be represented with the V0 AST") + "A single DML statement with multiple operations other than SET cannot be represented with the V0 AST" + ) } true } else { false } - if(!isSetOnly) { + if (!isSetOnly) { val dmlOp = opList.ops.first() sexp { @@ -402,11 +399,9 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } } } - } } - private fun IonWriterContext.writeSelectProjection(projection: SelectProjection, setQuantifier: SetQuantifier) { when (astVersion) { AstVersion.V0 -> @@ -420,13 +415,14 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writeSelectProjectionValueV0( projection: SelectProjectionValue, - setQuantifier: SetQuantifier) { + setQuantifier: SetQuantifier + ) { val (valueExpr) = projection symbol("select") sexp { symbol( when (setQuantifier) { - SetQuantifier.ALL -> "project" + SetQuantifier.ALL -> "project" SetQuantifier.DISTINCT -> "project_distinct" } ) @@ -450,14 +446,15 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writeSelectProjectionListV0( projection: SelectProjectionList, - setQuantifier: SetQuantifier) { + setQuantifier: SetQuantifier + ) { val (items) = projection symbol("select") sexp { symbol( when (setQuantifier) { - SetQuantifier.ALL -> "project" + SetQuantifier.ALL -> "project" SetQuantifier.DISTINCT -> "project_distinct" } ) @@ -466,8 +463,8 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): symbol("list") items.forEach { when (it) { - is SelectListItemStar -> case { writeSelectListItemStar(it) } - is 
SelectListItemExpr -> case { writeSelectListItemExpr(it) } + is SelectListItemStar -> case { writeSelectListItemStar(it) } + is SelectListItemExpr -> case { writeSelectListItemExpr(it) } is SelectListItemProjectAll -> case { writeSelectListItemProjectAll(it) } }.toUnit() } @@ -493,8 +490,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): writeExprNode(itemExpr) } } - } - else { + } else { writeExprNode(itemExpr) } } @@ -508,7 +504,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } private fun IonWriterContext.writeFromSource(fromSource: FromSource): Unit = - when(astVersion) { + when (astVersion) { AstVersion.V0 -> writeFromSourceV0(fromSource) } @@ -630,7 +626,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): val (root, components, _: MetaContainer) = expr symbol("path") writeExprNode(root) - when(astVersion) { + when (astVersion) { AstVersion.V0 -> case { writePathComponentsV0(components) } } } @@ -638,8 +634,8 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writePathComponentsV0(components: List) { components.forEach { when (it) { - is PathComponentExpr -> case { writePathComponentExprV0(it) } - is PathComponentUnpivot -> case { writePathComponentUnpivotV0(it) } + is PathComponentExpr -> case { writePathComponentExprV0(it) } + is PathComponentUnpivot -> case { writePathComponentUnpivotV0(it) } is PathComponentWildcard -> case { writePathComponentWildcardV0(it) } }.toUnit() } @@ -647,15 +643,14 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): private fun IonWriterContext.writePathComponentExprV0(pathComponent: PathComponentExpr) { val (exp, case) = pathComponent - //Only wrap variable references and literal strings in case_[in]sensitive... + // Only wrap variable references and literal strings in case_[in]sensitive... if ((exp is VariableReference) || exp is Literal && exp.ionValue is IonString) { sexp { symbol(case.toSymbol()) writeExprNode(exp) } - } - else { + } else { writeExprNode(exp) } } @@ -685,9 +680,9 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): // The true branch will unwrap that expression, preserving the original AST form. 
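// Concretely, per the branches below: `(not (between ...))` is re-emitted as `(not_between ...)`,
// `(not (like ...))` as `(not_like ...)`, and `(not (in ...))` as `(not_in ...)` in the V0 form.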
if (astVersion == AstVersion.V0 && op == NAryOp.NOT && expr.metas.hasMeta(LegacyLogicalNotMeta.TAG)) { val firstArg = args.first() - //Note: it is intentional that this is `when` statement and not an expression + // Note: it is intentional that this is `when` statement and not an expression when (firstArg) { - is NAry -> { + is NAry -> { val (argOp, argArgs, _: MetaContainer) = firstArg fun recurseArgs() { @@ -698,15 +693,15 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): symbol("not_between") recurseArgs() } - NAryOp.LIKE -> { + NAryOp.LIKE -> { symbol("not_like") recurseArgs() } - NAryOp.IN -> { + NAryOp.IN -> { symbol("not_in") recurseArgs() } - else -> { + else -> { throw IllegalStateException("Invalid NAryOp on argument of `(not )` node decorated with LegacyLogicalNotMeta") } } @@ -719,10 +714,9 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): writeExprNode(firstArg.expr) writeDataType(firstArg.type) } - else -> { + else -> { throw IllegalStateException("Invalid node type of of `(not )` node decorated with LegacyLogicalNotMeta") } - } return } @@ -734,7 +728,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): writeExprNode(it) } - when(op) { + when (op) { NAryOp.CALL -> { // Note: we can assume that by this point the AST has been checked // for errors. (in this case that means the arity is at least 1) @@ -760,7 +754,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } private fun getOpSymbol(op: NAryOp) = - when(astVersion) { + when (astVersion) { AstVersion.V0 -> op.symbol } @@ -858,7 +852,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } private fun IonWriterContext.nestByAlias(variables: LetVariables, block: () -> Unit) { - if(variables.byName != null) { + if (variables.byName != null) { writeAsTerm(variables.byName.metas) { sexp { symbol("by") @@ -871,7 +865,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } } private fun IonWriterContext.nestAtAlias(variables: LetVariables, block: () -> Unit) { - if(variables.atName != null) { + if (variables.atName != null) { writeAsTerm(variables.atName.metas) { sexp { symbol("at") @@ -885,7 +879,7 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): } private fun IonWriterContext.nestAsAlias(variables: LetVariables, block: () -> Unit) { - if(variables.asName != null) { + if (variables.asName != null) { writeAsTerm(variables.asName.metas) { sexp { symbol("as") @@ -903,8 +897,8 @@ private class AstSerializerImpl(val astVersion: AstVersion, val ion: IonSystem): is VariableReference -> id else -> throw UnsupportedOperationException( "Using arbitrary expressions to identify a function in a call_agg or call node is not supported. " + - "Functions must be identified by name only.") + "Functions must be identified by name only." + ) } } - } diff --git a/lang/src/org/partiql/lang/ast/ExprNodeToStatement.kt b/lang/src/org/partiql/lang/ast/ExprNodeToStatement.kt index 4c63ba3466..71d8f5e026 100644 --- a/lang/src/org/partiql/lang/ast/ExprNodeToStatement.kt +++ b/lang/src/org/partiql/lang/ast/ExprNodeToStatement.kt @@ -10,7 +10,7 @@ import org.partiql.pig.runtime.asPrimitive /** Converts an [ExprNode] to a [PartiqlAst.statement]. 
*/ fun ExprNode.toAstStatement(): PartiqlAst.Statement { val node = this - return when(node) { + return when (node) { is Literal, is LiteralMissing, is VariableReference, is Parameter, is NAry, is CallAgg, is Typed, is Path, is SimpleCase, is SearchedCase, is Select, is Struct, is DateLiteral, is TimeLiteral, is Seq, is NullIf, is Coalesce -> PartiqlAst.build { query(toAstExpr()) } @@ -26,7 +26,7 @@ fun ExprNode.toAstStatement(): PartiqlAst.Statement { internal fun PartiQlMetaContainer.toIonElementMetaContainer(): IonElementMetaContainer = com.amazon.ionelement.api.metaContainerOf(map { it.tag to it }) -private fun SymbolicName.toSymbolPrimitive() : SymbolPrimitive = +private fun SymbolicName.toSymbolPrimitive(): SymbolPrimitive = SymbolPrimitive(this.name, this.metas.toIonElementMetaContainer()) private fun ExprNode.toAstDdl(): PartiqlAst.Statement { @@ -34,7 +34,7 @@ private fun ExprNode.toAstDdl(): PartiqlAst.Statement { val metas = metas.toIonElementMetaContainer() return PartiqlAst.build { - when(thiz) { + when (thiz) { is Literal, is LiteralMissing, is VariableReference, is Parameter, is NAry, is CallAgg, is Typed, is Path, is SimpleCase, is SearchedCase, is Select, is Struct, is Seq, is DateLiteral, is TimeLiteral, is NullIf, is Coalesce, is DataManipulation, is Exec -> error("Can't convert ${thiz.javaClass} to PartiqlAst.ddl") @@ -46,26 +46,30 @@ private fun ExprNode.toAstDdl(): PartiqlAst.Statement { identifier(thiz.tableId.id, thiz.tableId.case.toAstCaseSensitivity()), thiz.keys.map { it.toAstExpr() } ), - metas) + metas + ) is DropIndex -> ddl( dropIndex( // case-sensitivity of table names cannot be represented with ExprNode. identifier(thiz.tableId.id, thiz.tableId.case.toAstCaseSensitivity()), - identifier(thiz.indexId.id, thiz.indexId.case.toAstCaseSensitivity())), - metas) + identifier(thiz.indexId.id, thiz.indexId.case.toAstCaseSensitivity()) + ), + metas + ) is DropTable -> // case-sensitivity of table names cannot be represented with ExprNode. 
ddl( dropTable( identifier(thiz.tableId.id, thiz.tableId.case.toAstCaseSensitivity()) ), - metas) + metas + ) } } } -private fun ExprNode.toAstExec() : PartiqlAst.Statement { +private fun ExprNode.toAstExec(): PartiqlAst.Statement { val node = this val metas = metas.toIonElementMetaContainer() @@ -90,7 +94,7 @@ fun ExprNode.toAstExpr(): PartiqlAst.Expr { is Parameter -> parameter(node.position.toLong(), metas) is NAry -> { val args = node.args.map { it.toAstExpr() } - when(node.op) { + when (node.op) { NAryOp.ADD -> when (args.size) { 0 -> throw IllegalArgumentException("Operator 'Add' must have at least one argument") 1 -> pos(args.first(), metas) @@ -142,7 +146,7 @@ fun ExprNode.toAstExpr(): PartiqlAst.Expr { callAgg_(node.setQuantifier.toAstSetQuantifier(), symbol1Primitive, node.arg.toAstExpr(), metas) } is Typed -> - when(node.op) { + when (node.op) { TypedOp.CAST -> cast(node.expr.toAstExpr(), node.type.toAstType(), metas) TypedOp.CAN_CAST -> canCast(node.expr.toAstExpr(), node.type.toAstType(), metas) TypedOp.CAN_LOSSLESS_CAST -> canLosslessCast(node.expr.toAstExpr(), node.type.toAstType(), metas) @@ -154,12 +158,14 @@ fun ExprNode.toAstExpr(): PartiqlAst.Expr { node.valueExpr.toAstExpr(), exprPairList(node.whenClauses.map { exprPair(it.valueExpr.toAstExpr(), it.thenExpr.toAstExpr()) }), node.elseExpr?.toAstExpr(), - metas) + metas + ) is SearchedCase -> searchedCase( exprPairList(node.whenClauses.map { exprPair(it.condition.toAstExpr(), it.thenExpr.toAstExpr()) }), node.elseExpr?.toAstExpr(), - metas) + metas + ) is Select -> select( // Only set setq if its distinct since setting it causes it to be added to the s-expressions @@ -181,10 +187,11 @@ fun ExprNode.toAstExpr(): PartiqlAst.Expr { having = node.having?.toAstExpr(), limit = node.limit?.toAstExpr(), offset = node.offset?.toAstExpr(), - metas = metas) + metas = metas + ) is Struct -> struct(node.fields.map { exprPair(it.name.toAstExpr(), it.expr.toAstExpr()) }, metas) is Seq -> - when(node.type) { + when (node.type) { SeqType.LIST -> list(node.values.map { it.toAstExpr() }, metas) SeqType.SEXP -> sexp(node.values.map { it.toAstExpr() }, metas) SeqType.BAG -> bag(node.values.map { it.toAstExpr() }, metas) @@ -235,14 +242,16 @@ private fun GroupBy.toAstGroupSpec(): PartiqlAst.GroupBy = PartiqlAst.build { groupBy_( this@toAstGroupSpec.grouping.toAstGroupStrategy(), - groupKeyList(this@toAstGroupSpec.groupByItems.map { - val keyMetas = it.asName?.metas?.toIonElementMetaContainer() ?: emptyMetaContainer() - groupKey_(it.expr.toAstExpr(), it.asName?.name?.asPrimitive(keyMetas) ) - }), - this@toAstGroupSpec.groupName?.name?.asPrimitive(this@toAstGroupSpec.groupName.metas.toIonElementMetaContainer())) + groupKeyList( + this@toAstGroupSpec.groupByItems.map { + val keyMetas = it.asName?.metas?.toIonElementMetaContainer() ?: emptyMetaContainer() + groupKey_(it.expr.toAstExpr(), it.asName?.name?.asPrimitive(keyMetas)) + } + ), + this@toAstGroupSpec.groupName?.name?.asPrimitive(this@toAstGroupSpec.groupName.metas.toIonElementMetaContainer()) + ) } - private fun GroupingStrategy.toAstGroupStrategy(): PartiqlAst.GroupingStrategy = PartiqlAst.build { when (this@toAstGroupStrategy) { @@ -284,18 +293,17 @@ private fun SetQuantifier.toAstSetQuantifier(): PartiqlAst.SetQuantifier { private fun SelectProjection.toAstSelectProject(): PartiqlAst.Projection { val thiz = this return PartiqlAst.build { - when(thiz) { + when (thiz) { is SelectProjectionValue -> projectValue(thiz.expr.toAstExpr(), thiz.metas.toIonElementMetaContainer()) is 
SelectProjectionList -> { - if(thiz.items.any { it is SelectListItemStar }) { - if(thiz.items.size > 1) error("More than one select item when SELECT * was present.") + if (thiz.items.any { it is SelectListItemStar }) { + if (thiz.items.size > 1) error("More than one select item when SELECT * was present.") val metas = (thiz.items[0] as SelectListItemStar).metas.toIonElementMetaContainer() projectStar(metas) - } - else + } else projectList( thiz.items.map { - when(it) { + when (it) { is SelectListItemExpr -> projectExpr_(it.expr.toAstExpr(), it.asName?.toPrimitive(), it.expr.metas.toIonElementMetaContainer()) is SelectListItemProjectAll -> projectAll(it.expr.toAstExpr(), it.expr.metas.toIonElementMetaContainer()) is SelectListItemStar -> error("this should happen due to `when` branch above.") @@ -319,7 +327,8 @@ private fun FromSource.toAstFromSource(): PartiqlAst.FromSource { thiz.variables.asName?.toPrimitive(), thiz.variables.atName?.toPrimitive(), thiz.variables.byName?.toPrimitive(), - thiz.expr.metas.toIonElementMetaContainer()) + thiz.expr.metas.toIonElementMetaContainer() + ) is FromSourceJoin -> { val jt = when (thiz.joinOp) { JoinOp.INNER -> inner() @@ -332,14 +341,16 @@ private fun FromSource.toAstFromSource(): PartiqlAst.FromSource { thiz.leftRef.toAstFromSource(), thiz.rightRef.toAstFromSource(), if (thiz.metas.hasMeta(IsImplictJoinMeta.TAG)) null else thiz.condition.toAstExpr(), - metas = metas) + metas = metas + ) } is FromSourceUnpivot -> unpivot_( thiz.expr.toAstExpr(), thiz.variables.asName?.toPrimitive(), thiz.variables.atName?.toPrimitive(), thiz.variables.byName?.toPrimitive(), - thiz.metas.toIonElementMetaContainer()) + thiz.metas.toIonElementMetaContainer() + ) } } } @@ -370,7 +381,7 @@ private fun PathComponent.toAstPathStep(): PartiqlAst.PathStep { private fun OnConflict.toAstOnConflict(): PartiqlAst.OnConflict { val thiz = this return PartiqlAst.build { - when(thiz.conflictAction) { + when (thiz.conflictAction) { ConflictAction.DO_NOTHING -> onConflict(thiz.condition.toAstExpr(), doNothing()) } } @@ -387,7 +398,8 @@ private fun DataManipulation.toAstDml(): PartiqlAst.Statement { thiz.from?.toAstFromSource(), thiz.where?.toAstExpr(), thiz.returning?.toAstReturningExpr(), - thiz.metas.toIonElementMetaContainer()) + thiz.metas.toIonElementMetaContainer() + ) } } @@ -397,7 +409,8 @@ private fun DmlOpList.toAstDmlOps(dml: DataManipulation): PartiqlAst.DmlOpList = this@toAstDmlOps.ops.map { it.toAstDmlOp(dml) }, - metas = dml.metas.toIonElementMetaContainer()) + metas = dml.metas.toIonElementMetaContainer() + ) } private fun DataManipulationOperation.toAstDmlOp(dml: DataManipulation): PartiqlAst.DmlOp = PartiqlAst.build { @@ -405,19 +418,23 @@ private fun DataManipulationOperation.toAstDmlOp(dml: DataManipulation): Partiql is InsertOp -> insert( thiz.lvalue.toAstExpr(), - thiz.values.toAstExpr()) + thiz.values.toAstExpr() + ) is InsertValueOp -> insertValue( thiz.lvalue.toAstExpr(), thiz.value.toAstExpr(), thiz.position?.toAstExpr(), thiz.onConflict?.toAstOnConflict(), - dml.metas.toIonElementMetaContainer()) + dml.metas.toIonElementMetaContainer() + ) is AssignmentOp -> set( assignment( thiz.assignment.lvalue.toAstExpr(), - thiz.assignment.rvalue.toAstExpr())) + thiz.assignment.rvalue.toAstExpr() + ) + ) is RemoveOp -> remove(thiz.lvalue.toAstExpr()) DeleteOp -> delete() } @@ -461,7 +478,7 @@ fun DataType.toAstType(): PartiqlAst.Type { val arg2 = thiz.args.getOrNull(1)?.toLong() return PartiqlAst.build { - when(thiz.sqlDataType) { + when (thiz.sqlDataType) { 
SqlDataType.MISSING -> missingType(metas) SqlDataType.NULL -> nullType(metas) SqlDataType.BOOLEAN -> booleanType(metas) @@ -495,6 +512,5 @@ fun DataType.toAstType(): PartiqlAst.Type { } } - private fun SymbolicName.toPrimitive(): SymbolPrimitive = - SymbolPrimitive(this.name, this.metas.toIonElementMetaContainer()) \ No newline at end of file + SymbolPrimitive(this.name, this.metas.toIonElementMetaContainer()) diff --git a/lang/src/org/partiql/lang/ast/InternalMetas.kt b/lang/src/org/partiql/lang/ast/InternalMetas.kt index 14e3fc793c..4b75f7d48b 100644 --- a/lang/src/org/partiql/lang/ast/InternalMetas.kt +++ b/lang/src/org/partiql/lang/ast/InternalMetas.kt @@ -20,13 +20,14 @@ import com.amazon.ion.IonWriter * Base class for [Meta] implementations which are used internally by [org.partiql.lang.eval.EvaluatingCompiler] * during compilation and should never be serialized. */ -open class InternalMeta(override val tag: String): Meta { +open class InternalMeta(override val tag: String) : Meta { override val shouldSerialize: Boolean get() = false override fun serialize(writer: IonWriter) { throw UnsupportedOperationException( - "${this.javaClass} is meant for internal use only and cannot be serialized.") + "${this.javaClass} is meant for internal use only and cannot be serialized." + ) } } @@ -59,4 +60,4 @@ class IsSyntheticNameMeta private constructor() : InternalMeta(TAG) { val instance = IsSyntheticNameMeta() } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/IsCountStarMeta.kt b/lang/src/org/partiql/lang/ast/IsCountStarMeta.kt index d34ec3017c..61d8d5ccaf 100644 --- a/lang/src/org/partiql/lang/ast/IsCountStarMeta.kt +++ b/lang/src/org/partiql/lang/ast/IsCountStarMeta.kt @@ -22,4 +22,4 @@ class IsCountStarMeta private constructor() : Meta { val instance = IsCountStarMeta() val deserializer = MemoizedMetaDeserializer(TAG, instance) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt b/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt index 7264433a70..d1cb174f39 100644 --- a/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt +++ b/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt @@ -17,7 +17,7 @@ package org.partiql.lang.ast * Meta node intended to be attached to an instance of [FromSourcedJoin] to indicate that no * join condition was specified in the original query and therefore this is an implicit join. */ -class IsImplictJoinMeta private constructor(): Meta { +class IsImplictJoinMeta private constructor() : Meta { override val tag = TAG companion object { @@ -26,4 +26,4 @@ class IsImplictJoinMeta private constructor(): Meta { val instance = IsImplictJoinMeta() val deserializer = MemoizedMetaDeserializer(TAG, instance) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt b/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt index 5b91be7f31..cde17a9b93 100644 --- a/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt +++ b/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt @@ -21,7 +21,7 @@ package org.partiql.lang.ast * Meta node intended to be attached to an instance of [Literal] to indicate that it was * designated as an `ionLiteral` in the parsed statement. 
*/ -class IsIonLiteralMeta private constructor(): Meta { +class IsIonLiteralMeta private constructor() : Meta { override val tag = TAG companion object { @@ -30,4 +30,4 @@ class IsIonLiteralMeta private constructor(): Meta { val instance = IsIonLiteralMeta() val deserializer = MemoizedMetaDeserializer(TAG, instance) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/LegacyLogicalNotMeta.kt b/lang/src/org/partiql/lang/ast/LegacyLogicalNotMeta.kt index f49e7c3295..69531009b3 100644 --- a/lang/src/org/partiql/lang/ast/LegacyLogicalNotMeta.kt +++ b/lang/src/org/partiql/lang/ast/LegacyLogicalNotMeta.kt @@ -41,4 +41,3 @@ class LegacyLogicalNotMeta private constructor() : Meta { val deserializer = MemoizedMetaDeserializer(TAG, instance) } } - diff --git a/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt b/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt index 41e0927d69..badef18bc0 100644 --- a/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt +++ b/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt @@ -28,4 +28,4 @@ import com.amazon.ion.IonValue */ class MemoizedMetaDeserializer(override val tag: String, val instance: Meta) : MetaDeserializer { override fun deserialize(sexp: IonValue): Meta = instance -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt b/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt index 32f1d84bf4..7611659552 100644 --- a/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt +++ b/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt @@ -26,12 +26,10 @@ import org.partiql.lang.util.longValue * Represents a specific location within a source file. */ data class SourceLocationMeta(val lineNum: Long, val charOffset: Long, val length: Long = -1) : Meta { - override fun toString() = "$lineNum:$charOffset:${if(length > 0) length.toString() else ""}" + override fun toString() = "$lineNum:$charOffset:${if (length > 0) length.toString() else ""}" override val tag = TAG - - override fun serialize(writer: IonWriter) { IonWriterContext(writer).apply { struct { @@ -50,7 +48,7 @@ data class SourceLocationMeta(val lineNum: Long, val charOffset: Long, val lengt if (charOffset != other.charOffset) return false // if length is unknown or the other is unknown, ignore the length. - if(length > 0 && other.length > 0 && length != other.length) return false + if (length > 0 && other.length > 0 && length != other.length) return false return true } @@ -60,7 +58,7 @@ data class SourceLocationMeta(val lineNum: Long, val charOffset: Long, val lengt result = 31 * result + charOffset.hashCode() // if the length is unknown, ignore it. - if(length > 0) { + if (length > 0) { result = 31 * result + length.hashCode() } return result @@ -91,4 +89,4 @@ val IonElementMetaContainer.sourceLocation: SourceLocationMeta? 
get() = metaOrNu val PartiQlMetaContainer.sourceLocationContainer: PartiQlMetaContainer get() = sourceLocation?.let { metaContainerOf(it) - } ?: emptyMetaContainer \ No newline at end of file + } ?: emptyMetaContainer diff --git a/lang/src/org/partiql/lang/ast/StatementToExprNode.kt b/lang/src/org/partiql/lang/ast/StatementToExprNode.kt index 5f278478d0..361552a76f 100644 --- a/lang/src/org/partiql/lang/ast/StatementToExprNode.kt +++ b/lang/src/org/partiql/lang/ast/StatementToExprNode.kt @@ -21,7 +21,7 @@ internal fun PartiqlAst.Expr.toExprNode(ion: IonSystem): ExprNode { return StatementTransformer(ion).transform(this) } -internal fun PartiqlAst.SetQuantifier.toExprNodeSetQuantifier(): SetQuantifier = +internal fun PartiqlAst.SetQuantifier.toExprNodeSetQuantifier(): SetQuantifier = when (this) { is PartiqlAst.SetQuantifier.All -> SetQuantifier.ALL is PartiqlAst.SetQuantifier.Distinct -> SetQuantifier.DISTINCT @@ -51,7 +51,6 @@ private class StatementTransformer(val ion: IonSystem) { fun transform(stmt: PartiqlAst.Expr): ExprNode = stmt.toExprNode() - private fun PartiqlAst.Statement.Query.toExprNode(): ExprNode { return this.expr.toExprNode() } @@ -93,7 +92,8 @@ private class StatementTransformer(val ion: IonSystem) { is PartiqlAst.SetQuantifier.All -> NAryOp.UNION_ALL }, operands.toExprNodeList(), - metas) + metas + ) is PartiqlAst.Expr.Intersect -> NAry( when (setq) { @@ -101,7 +101,8 @@ private class StatementTransformer(val ion: IonSystem) { is PartiqlAst.SetQuantifier.All -> NAryOp.INTERSECT_ALL }, operands.toExprNodeList(), - metas) + metas + ) is PartiqlAst.Expr.Except -> NAry( when (setq) { @@ -109,7 +110,8 @@ private class StatementTransformer(val ion: IonSystem) { is PartiqlAst.SetQuantifier.All -> NAryOp.EXCEPT_ALL }, operands.toExprNodeList(), - metas) + metas + ) is PartiqlAst.Expr.Like -> NAry(NAryOp.LIKE, listOfNotNull(value.toExprNode(), pattern.toExprNode(), escape?.toExprNode()), metas) is PartiqlAst.Expr.Between -> NAry(NAryOp.BETWEEN, listOf(value.toExprNode(), from.toExprNode(), to.toExprNode()), metas) is PartiqlAst.Expr.InCollection -> NAry(NAryOp.IN, operands.toExprNodeList(), metas) @@ -123,12 +125,14 @@ private class StatementTransformer(val ion: IonSystem) { expr.toExprNode(), cases.pairs.map { SimpleCaseWhen(it.first.toExprNode(), it.second.toExprNode()) }, default?.toExprNode(), - metas) + metas + ) is PartiqlAst.Expr.SearchedCase -> SearchedCase( cases.pairs.map { SearchedCaseWhen(it.first.toExprNode(), it.second.toExprNode()) }, this.default?.toExprNode(), - metas) + metas + ) is PartiqlAst.Expr.Struct -> Struct(this.fields.map { StructField(it.first.toExprNode(), it.second.toExprNode()) }, metas) is PartiqlAst.Expr.Bag -> Seq(SeqType.BAG, values.toExprNodeList(), metas) is PartiqlAst.Expr.List -> Seq(SeqType.LIST, values.toExprNodeList(), metas) @@ -143,12 +147,14 @@ private class StatementTransformer(val ion: IonSystem) { PathComponentExpr( it.index.toExprNode(), it.case.toCaseSensitivity(), - componentMetas) + componentMetas + ) is PartiqlAst.PathStep.PathUnpivot -> PathComponentUnpivot(componentMetas) is PartiqlAst.PathStep.PathWildcard -> PathComponentWildcard(componentMetas) } }, - metas) + metas + ) is PartiqlAst.Expr.Call -> NAry( NAryOp.CALL, @@ -157,19 +163,23 @@ private class StatementTransformer(val ion: IonSystem) { funcName.text, CaseSensitivity.INSENSITIVE, ScopeQualifier.UNQUALIFIED, - emptyMetaContainer) + emptyMetaContainer + ) ) + args.map { it.toExprNode() }, - metas) + metas + ) is PartiqlAst.Expr.CallAgg -> CallAgg( VariableReference( 
funcName.text, CaseSensitivity.INSENSITIVE, ScopeQualifier.UNQUALIFIED, - funcName.metas.toPartiQlMetaContainer()), + funcName.metas.toPartiQlMetaContainer() + ), setq.toSetQuantifier(), arg.toExprNode(), - metas) + metas + ) is PartiqlAst.Expr.Select -> Select( setQuantifier = setq?.toSetQuantifier() ?: SetQuantifier.ALL, @@ -183,7 +193,7 @@ private class StatementTransformer(val ion: IonSystem) { limit = limit?.toExprNode(), offset = offset?.toExprNode(), metas = metas - ) + ) is PartiqlAst.Expr.Date -> DateLiteral(year.value.toInt(), month.value.toInt(), day.value.toInt(), metas) is PartiqlAst.Expr.LitTime -> TimeLiteral( @@ -215,9 +225,12 @@ private class StatementTransformer(val ion: IonSystem) { is PartiqlAst.ProjectItem.ProjectExpr -> SelectListItemExpr( it.expr.toExprNode(), - it.asAlias?.toSymbolicName()) + it.asAlias?.toSymbolicName() + ) } - }, metas) + }, + metas + ) } } @@ -230,15 +243,19 @@ private class StatementTransformer(val ion: IonSystem) { variables = LetVariables( asName = asAlias?.toSymbolicName(), atName = atAlias?.toSymbolicName(), - byName = byAlias?.toSymbolicName())) + byName = byAlias?.toSymbolicName() + ) + ) is PartiqlAst.FromSource.Unpivot -> FromSourceUnpivot( expr = expr.toExprNode(), variables = LetVariables( asName = asAlias?.toSymbolicName(), atName = atAlias?.toSymbolicName(), - byName = byAlias?.toSymbolicName()), - metas = metas) + byName = byAlias?.toSymbolicName() + ), + metas = metas + ) is PartiqlAst.FromSource.Join -> FromSourceJoin( joinOp = type.toJoinOp(), @@ -247,7 +264,8 @@ private class StatementTransformer(val ion: IonSystem) { // Consider adding StaticTypeMeta here only when static type inference occurs. // See https://github.com/partiql/partiql-lang-kotlin/issues/511 condition = predicate?.toExprNode() ?: Literal(ion.newBool(true), metaContainerOf(StaticTypeMeta(StaticType.BOOL))), - metas = metas) + metas = metas + ) } } @@ -277,10 +295,13 @@ private class StatementTransformer(val ion: IonSystem) { sortSpecItems = this.sortSpecs.map { SortSpec( it.expr.toExprNode(), - it.orderingSpec.toOrderSpec())}) + it.orderingSpec.toOrderSpec() + ) + } + ) private fun PartiqlAst.OrderingSpec?.toOrderSpec(): OrderingSpec = - when(this) { + when (this) { is PartiqlAst.OrderingSpec.Desc -> OrderingSpec.DESC else -> OrderingSpec.ASC } @@ -291,13 +312,15 @@ private class StatementTransformer(val ion: IonSystem) { groupByItems = keyList.keys.map { GroupByItem( it.expr.toExprNode(), - it.asAlias?.toSymbolicName()) + it.asAlias?.toSymbolicName() + ) }, - groupName = groupAsAlias?.toSymbolicName()) + groupName = groupAsAlias?.toSymbolicName() + ) private fun PartiqlAst.GroupingStrategy.toGroupingStrategy(): GroupingStrategy = - when(this) { - is PartiqlAst.GroupingStrategy.GroupFull-> GroupingStrategy.FULL + when (this) { + is PartiqlAst.GroupingStrategy.GroupFull -> GroupingStrategy.FULL is PartiqlAst.GroupingStrategy.GroupPartial -> GroupingStrategy.PARTIAL } @@ -374,7 +397,7 @@ private class StatementTransformer(val ion: IonSystem) { } private fun PartiqlAst.OnConflict.toOnConflictNode(): OnConflict { - return when(this.conflictAction) { + return when (this.conflictAction) { is PartiqlAst.ConflictAction.DoNothing -> OnConflict(this.expr.toExprNode(), ConflictAction.DO_NOTHING) } } @@ -400,12 +423,15 @@ private class StatementTransformer(val ion: IonSystem) { lvalue = target.toExprNode(), value = value.toExprNode(), position = this.index?.toExprNode(), - onConflict = onConflict?.toOnConflictNode()) + onConflict = onConflict?.toOnConflictNode() + ) is 
PartiqlAst.DmlOp.Set -> AssignmentOp( assignment = Assignment( lvalue = this.assignment.target.toExprNode(), - rvalue = assignment.value.toExprNode())) + rvalue = assignment.value.toExprNode() + ) + ) is PartiqlAst.DmlOp.Remove -> RemoveOp(target.toExprNode()) @@ -418,8 +444,8 @@ private class StatementTransformer(val ion: IonSystem) { ReturningExpr( returningElems = elems.map { ReturningElem( - it.mapping.toExprNodeReturningMapping(), - it.column.toColumnComponent() + it.mapping.toExprNodeReturningMapping(), + it.column.toColumnComponent() ) } ) @@ -433,17 +459,17 @@ private class StatementTransformer(val ion: IonSystem) { } private fun PartiqlAst.ReturningMapping.toExprNodeReturningMapping(): ReturningMapping = - when(this) { - is PartiqlAst.ReturningMapping.ModifiedOld -> ReturningMapping.MODIFIED_OLD - is PartiqlAst.ReturningMapping.ModifiedNew -> ReturningMapping.MODIFIED_NEW - is PartiqlAst.ReturningMapping.AllOld -> ReturningMapping.ALL_OLD - is PartiqlAst.ReturningMapping.AllNew -> ReturningMapping.ALL_NEW - } + when (this) { + is PartiqlAst.ReturningMapping.ModifiedOld -> ReturningMapping.MODIFIED_OLD + is PartiqlAst.ReturningMapping.ModifiedNew -> ReturningMapping.MODIFIED_NEW + is PartiqlAst.ReturningMapping.AllOld -> ReturningMapping.ALL_OLD + is PartiqlAst.ReturningMapping.AllNew -> ReturningMapping.ALL_NEW + } private fun PartiqlAst.Statement.Ddl.toExprNode(): ExprNode { val op = this.op val metas = this.metas.toPartiQlMetaContainer() - return when(op) { + return when (op) { is PartiqlAst.DdlOp.CreateTable -> CreateTable(op.tableName.text, metas) is PartiqlAst.DdlOp.DropTable -> DropTable( @@ -452,7 +478,8 @@ private class StatementTransformer(val ion: IonSystem) { case = op.tableName.case.toCaseSensitivity(), metas = emptyMetaContainer ), - metas = metas) + metas = metas + ) is PartiqlAst.DdlOp.CreateIndex -> CreateIndex( tableId = Identifier( @@ -461,7 +488,8 @@ private class StatementTransformer(val ion: IonSystem) { metas = emptyMetaContainer ), keys = op.fields.map { it.toExprNode() }, - metas = metas) + metas = metas + ) is PartiqlAst.DdlOp.DropIndex -> DropIndex( tableId = Identifier( @@ -474,7 +502,8 @@ private class StatementTransformer(val ion: IonSystem) { case = op.keys.case.toCaseSensitivity(), metas = emptyMetaContainer ), - metas = metas) + metas = metas + ) } } diff --git a/lang/src/org/partiql/lang/ast/Util.kt b/lang/src/org/partiql/lang/ast/Util.kt index aec028b67d..1fb316e42d 100644 --- a/lang/src/org/partiql/lang/ast/Util.kt +++ b/lang/src/org/partiql/lang/ast/Util.kt @@ -35,17 +35,18 @@ fun createCountStar(ion: IonSystem, metas: MetaContainer): CallAgg { // The [VariableReference] and [Literal] below should only get the [SourceLocationMeta] if present, // not any other metas. val srcLocationMetaOnly = metas.find(SourceLocationMeta.TAG) - ?.let { metaContainerOf(it) } ?: metaContainerOf() - + ?.let { metaContainerOf(it) } ?: metaContainerOf() + // optimize count(*) to count(1). 
return CallAgg( funcExpr = VariableReference( id = "count", case = CaseSensitivity.INSENSITIVE, scopeQualifier = ScopeQualifier.UNQUALIFIED, - metas = srcLocationMetaOnly), + metas = srcLocationMetaOnly + ), setQuantifier = SetQuantifier.ALL, - arg = Literal(ion.newInt(1), srcLocationMetaOnly), + arg = Literal(ion.newInt(1), srcLocationMetaOnly), metas = metas.add(IsCountStarMeta.instance) ) } diff --git a/lang/src/org/partiql/lang/ast/ast.kt b/lang/src/org/partiql/lang/ast/ast.kt index 2351bd615e..ece71514f5 100644 --- a/lang/src/org/partiql/lang/ast/ast.kt +++ b/lang/src/org/partiql/lang/ast/ast.kt @@ -74,31 +74,31 @@ sealed class ExprNode : AstNode(), HasMetas { // This looks like duplication but really isn't: each branch executes a different compiler-generated `copy` function. val metas = newMetas ?: this.metas return when (this) { - is Literal -> { + is Literal -> { copy(metas = metas) } - is LiteralMissing -> { + is LiteralMissing -> { copy(metas = metas) } is VariableReference -> { copy(metas = metas) } - is NAry -> { + is NAry -> { copy(metas = metas) } - is CallAgg -> { + is CallAgg -> { copy(metas = metas) } - is Typed -> { + is Typed -> { copy(metas = metas) } - is Path -> { + is Path -> { copy(metas = metas) } - is SimpleCase -> { + is SimpleCase -> { copy(metas = metas) } - is SearchedCase -> { + is SearchedCase -> { copy(metas = metas) } is Select -> { @@ -107,34 +107,34 @@ sealed class ExprNode : AstNode(), HasMetas { is Struct -> { copy(metas = metas) } - is Seq -> { + is Seq -> { copy(metas = metas) } - is DataManipulation -> { + is DataManipulation -> { copy(metas = metas) } - is CreateTable -> { + is CreateTable -> { copy(metas = metas) } - is CreateIndex -> { + is CreateIndex -> { copy(metas = metas) } - is DropTable -> { + is DropTable -> { copy(metas = metas) } - is DropIndex -> { + is DropIndex -> { copy(metas = metas) } - is Parameter -> { + is Parameter -> { copy(metas = metas) } - is NullIf -> { + is NullIf -> { copy(metas = metas) } - is Coalesce -> { + is Coalesce -> { copy(metas = metas) } - is Exec -> { + is Exec -> { copy(metas = metas) } is DateLiteral -> { @@ -182,24 +182,25 @@ data class VariableReference( * Respects case sensitivity when comparing against another [VariableReference]. */ override fun equals(other: Any?): Boolean = - if(other !is VariableReference) { false } - else { - id.compareTo(other.id, case == CaseSensitivity.INSENSITIVE) == 0 - && case == other.case - && scopeQualifier == other.scopeQualifier - && metas == other.metas + if (other !is VariableReference) { false } else { + id.compareTo(other.id, case == CaseSensitivity.INSENSITIVE) == 0 && + case == other.case && + scopeQualifier == other.scopeQualifier && + metas == other.metas } override fun hashCode(): Int = Arrays.hashCode( arrayOf( - when(case) { + when (case) { CaseSensitivity.SENSITIVE -> id CaseSensitivity.INSENSITIVE -> id.toLowerCase() }, case, scopeQualifier, - metas)) + metas + ) + ) override val children: List = listOf() } @@ -218,8 +219,8 @@ data class Identifier( * by the evaluation environment. 
*/ data class Parameter( - val position: Int, - override val metas: MetaContainer + val position: Int, + override val metas: MetaContainer ) : ExprNode() { override val children: List = listOf() } @@ -275,9 +276,9 @@ data class Coalesce( override val children: List = args } -//******************************** +// ******************************** // Stored procedure clauses -//******************************** +// ******************************** /** Represents a call to a stored procedure, i.e. `EXEC stored_procedure [.*]` */ data class Exec( @@ -288,9 +289,9 @@ data class Exec( override val children: List = args } -//******************************** +// ******************************** // Path expressions -//******************************** +// ******************************** /** Represents a path expression, i.e. `foo.bar`, `foo[*].bar`, etc. */ data class Path( @@ -301,20 +302,20 @@ data class Path( override val children: List = listOf(root) + components } -//******************************** +// ******************************** // Simple CASE -//******************************** +// ******************************** /** For `CASE foo WHEN THEN ELSE END` */ data class SimpleCase( val valueExpr: ExprNode, val whenClauses: List, val elseExpr: ExprNode?, - override val metas: MetaContainer) : ExprNode() { + override val metas: MetaContainer +) : ExprNode() { override val children: List = listOf(valueExpr) + whenClauses + listOfNotNull(elseExpr) } - /** Represents a case of a [SimpleCase]. */ data class SimpleCaseWhen( val valueExpr: ExprNode, @@ -323,9 +324,9 @@ data class SimpleCaseWhen( override val children: List = listOf(valueExpr, thenExpr) } -//******************************** +// ******************************** // Searched CASE -//******************************** +// ******************************** /** For `CASE WHEN THEN ELSE END`. */ data class SearchedCase( @@ -344,9 +345,9 @@ data class SearchedCaseWhen( override val children: List = listOf(condition, thenExpr) } -//******************************** +// ******************************** // Data Manipulation Expressions -//******************************** +// ******************************** sealed class DataManipulationOperation(val name: String) : AstNode() @@ -380,11 +381,13 @@ data class InsertValueOp( val value: ExprNode, val position: ExprNode?, val onConflict: OnConflict? 
-): DataManipulationOperation(name = "insert_value") { +) : DataManipulationOperation(name = "insert_value") { override val children: List = listOfNotNull(lvalue, value, position, onConflict) } -data class OnConflict(val condition: ExprNode, val conflictAction: ConflictAction +data class OnConflict( + val condition: ExprNode, + val conflictAction: ConflictAction ) : AstNode() { override val children: List = listOf(condition) } @@ -427,7 +430,7 @@ fun DeleteOp() = DeleteOp /** Represents `RETURNING [ ',' ]*` */ data class ReturningExpr( val returningElems: List -): AstNode() { +) : AstNode() { override val children: List = returningElems } @@ -435,7 +438,7 @@ data class ReturningExpr( data class ReturningElem( val returningMapping: ReturningMapping, val columnComponent: ColumnComponent -): AstNode() { +) : AstNode() { override val children: List = listOf(columnComponent) } @@ -458,9 +461,9 @@ enum class ReturningMapping { ALL_OLD } -//******************************** +// ******************************** // Select Expression -//******************************** +// ******************************** /** * Represents a `SELECT` statements as well as the `PIVOT` and `SELECT VALUE`, variants. @@ -481,9 +484,9 @@ data class Select( override val children: List = listOfNotNull(projection, from, fromLet, where, groupBy, having, orderBy, limit, offset) } -//******************************** +// ******************************** // DDL Expressions -//******************************** +// ******************************** // TODO determine if we should encapsulate DDL as a separate space from ExprNode... @@ -583,7 +586,7 @@ data class SymbolicName( sealed class PathComponent : AstNode(), HasMetas { fun copy(newMetas: MetaContainer?): PathComponent { val metas = newMetas ?: this.metas - return when(this) { + return when (this) { is PathComponentExpr -> this.copy(metas = metas) is PathComponentUnpivot -> this.copy(metas = metas) is PathComponentWildcard -> this.copy(metas = metas) @@ -605,9 +608,9 @@ data class PathComponentExpr( companion object { private fun getStringValueIfCaseInsensitiveLiteral(component: PathComponentExpr): String? = when { - component.case == CaseSensitivity.INSENSITIVE - && component.expr is Literal - && component.expr.ionValue.type == IonType.STRING -> { + component.case == CaseSensitivity.INSENSITIVE && + component.expr is Literal && + component.expr.ionValue.type == IonType.STRING -> { component.expr.ionValue.stringValue() } else -> null @@ -631,7 +634,7 @@ data class PathComponentExpr( myStringValue == null || otherStringValue == null -> // Only one of the components was a case insensitive literal, so they are not equal false - else -> + else -> // Both components are case insensitive literals, perform case insensitive comparison myStringValue.equals(otherStringValue, true) } @@ -655,11 +658,10 @@ data class PathComponentWildcard(override val metas: MetaContainer) : PathCompon override val children: List = listOf() } - sealed class SelectProjection : AstNode(), HasMetas { fun copy(newMetas: MetaContainer? = null): SelectProjection { val metas = newMetas ?: this.metas - return when(this) { + return when (this) { is SelectProjectionList -> this.copy(metas = metas) is SelectProjectionValue -> this.copy(metas = metas) is SelectProjectionPivot -> this.copy(metas = metas) @@ -735,9 +737,9 @@ data class SelectListItemStar(override val metas: MetaContainer) : SelectListIte * Note: `FromSource`s that are separated by commas are modeled as an INNER JOIN with a condition of `true`. 
*/ sealed class FromSource : AstNode() { - fun metas(): MetaContainer = when(this) { - is FromSourceExpr -> this.expr.metas - is FromSourceJoin -> this.metas + fun metas(): MetaContainer = when (this) { + is FromSourceExpr -> this.expr.metas + is FromSourceJoin -> this.metas is FromSourceUnpivot -> this.expr.metas } } @@ -768,8 +770,8 @@ sealed class FromSourceLet : FromSource() { abstract val variables: LetVariables fun copy(newVariables: LetVariables): FromSourceLet = - when(this) { - is FromSourceExpr -> this.copy(variables = newVariables) + when (this) { + is FromSourceExpr -> this.copy(variables = newVariables) is FromSourceUnpivot -> this.copy(variables = newVariables) } } @@ -798,9 +800,9 @@ data class FromSourceUnpivot( override val children: List = listOf(expr) } -//******************************** +// ******************************** // LET clause -//******************************** +// ******************************** /** Represents a list of LetBindings */ data class LetSource( @@ -822,14 +824,14 @@ data class GroupBy( val grouping: GroupingStrategy, val groupByItems: List, val groupName: SymbolicName? = null -): AstNode() { +) : AstNode() { override val children: List = groupByItems } data class GroupByItem( val expr: ExprNode, val asName: SymbolicName? = null -): AstNode() { +) : AstNode() { override val children: List = listOf(expr) } @@ -839,25 +841,25 @@ data class GroupByItem( */ data class OrderBy( val sortSpecItems: List -): AstNode() { +) : AstNode() { override val children: List = sortSpecItems } data class SortSpec( val expr: ExprNode, val orderingSpec: OrderingSpec -): AstNode() { +) : AstNode() { override val children: List = listOf(expr) } -//******************************** +// ******************************** // Constructors -//******************************** +// ******************************** /** Represents a field in a struct constructor. */ data class StructField( val name: ExprNode, val expr: ExprNode -): AstNode() { +) : AstNode() { override val children: List = listOf(name, expr) } @@ -897,9 +899,9 @@ data class DataType( override val children: List = listOf() } -//******************************** +// ******************************** // Node attributes -//******************************** +// ******************************** /** Indicates case sensitivity of variable references. */ enum class CaseSensitivity(private val symbol: String) { @@ -907,7 +909,7 @@ enum class CaseSensitivity(private val symbol: String) { INSENSITIVE("case_insensitive"); companion object { - fun fromSymbol(s: String) : CaseSensitivity = when (s) { + fun fromSymbol(s: String): CaseSensitivity = when (s) { "case_sensitive" -> SENSITIVE "case_insensitive" -> INSENSITIVE else -> throw IllegalArgumentException("Unrecognized CaseSensitivity $s") @@ -915,7 +917,6 @@ enum class CaseSensitivity(private val symbol: String) { } fun toSymbol() = symbol - } /** Indicates if all rows in a select query are to be returned or only distinct rows. */ @@ -1009,7 +1010,6 @@ enum class NAryOp(val arityRange: IntRange, val symbol: String, val textName: St .map { Pair(it.symbol, it) } .toMap() - fun forSymbol(symbol: String): NAryOp? = OP_SYMBOL_TO_OP_LOOKUP[symbol] } } @@ -1096,7 +1096,6 @@ data class TimeLiteral( override val children: List = listOf() } - /** * Indicates strategy for binding lookup within scopes. 
*/ @@ -1119,9 +1118,10 @@ sealed class SqlDataType(val typeName: String, open val arityRange: IntRange) { */ @JvmStatic fun values(): Array = arrayOf( - MISSING, NULL, BOOLEAN, SMALLINT, INTEGER4, INTEGER8, INTEGER, FLOAT, REAL, DOUBLE_PRECISION, DECIMAL, - NUMERIC, DATE, TIME, TIME_WITH_TIME_ZONE, TIMESTAMP, CHARACTER, CHARACTER_VARYING, STRING, SYMBOL, CLOB, - BLOB, STRUCT, TUPLE, LIST, SEXP, BAG, ANY) + MISSING, NULL, BOOLEAN, SMALLINT, INTEGER4, INTEGER8, INTEGER, FLOAT, REAL, DOUBLE_PRECISION, DECIMAL, + NUMERIC, DATE, TIME, TIME_WITH_TIME_ZONE, TIMESTAMP, CHARACTER, CHARACTER_VARYING, STRING, SYMBOL, CLOB, + BLOB, STRUCT, TUPLE, LIST, SEXP, BAG, ANY + ) /* * Making this object lazy so that any reference to below objects diff --git a/lang/src/org/partiql/lang/ast/meta.kt b/lang/src/org/partiql/lang/ast/meta.kt index 1fbbf683fb..54d9c536d7 100644 --- a/lang/src/org/partiql/lang/ast/meta.kt +++ b/lang/src/org/partiql/lang/ast/meta.kt @@ -114,7 +114,7 @@ private data class MetaContainerImpl internal constructor(private val metas: Tre metas.containsKey(tagName) override operator fun get(tagName: String): Meta = - metas[tagName] ?: throw IllegalArgumentException("Meta with tag '${tagName}' is not present in this MetaContainer instance.") + metas[tagName] ?: throw IllegalArgumentException("Meta with tag '$tagName' is not present in this MetaContainer instance.") override fun find(tagName: String): Meta? = metas[tagName] @@ -146,12 +146,12 @@ private data class MetaContainerImpl internal constructor(private val metas: Tre override fun equals(other: Any?): Boolean = when { this === other -> true - else -> when (other) { - null -> false + else -> when (other) { + null -> false is MetaContainerImpl -> { when { metas.size != other.metas.size -> false - else -> { + else -> { metas.forEach { val otherValue = other.metas[it.key] when (otherValue) { @@ -167,7 +167,7 @@ private data class MetaContainerImpl internal constructor(private val metas: Tre } } } - else -> false + else -> false } } } @@ -188,16 +188,17 @@ fun metaContainerOf(metas: Iterable): MetaContainer { return MetaContainerImpl( TreeMap().apply { metas.forEach { - //Sanity check to make sure there are no duplicate keys (the type of the Meta instance is used as the key) + // Sanity check to make sure there are no duplicate keys (the type of the Meta instance is used as the key) if (containsKey(it.tag)) { IllegalArgumentException("List of metas contains one or more duplicate s-expression tag: ${it.tag}") } put(it.tag, it) } - }) + } + ) } -infix fun Class<*>.to (m: Meta) = Pair(this, m) +infix fun Class<*>.to(m: Meta) = Pair(this, m) /** * Merges two meta containers. 
@@ -209,5 +210,5 @@ operator fun MetaContainer.plus(other: MetaContainer): MetaContainer = TreeMap().also { newMap -> forEach { newMap.put(it.tag, it) } other.forEach { newMap.put(it.tag, it) } - }) - + } + ) diff --git a/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt b/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt index 02f51c084d..24f737b351 100644 --- a/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt +++ b/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt @@ -105,24 +105,24 @@ open class AstRewriterBase : AstRewriter { is Literal -> rewriteLiteral(node) is LiteralMissing -> rewriteLiteralMissing(node) is VariableReference -> rewriteVariableReference(node) - is NAry -> rewriteNAry(node) - is CallAgg -> rewriteCallAgg(node) - is Typed -> rewriteTyped(node) - is Path -> rewritePath(node) - is SimpleCase -> rewriteSimpleCase(node) - is SearchedCase -> rewriteSearchedCase(node) - is Struct -> rewriteStruct(node) - is Seq -> rewriteSeq(node) - is Select -> rewriteSelect(node) - is Parameter -> rewriteParameter(node) - is DataManipulation -> rewriteDataManipulation(node) - is CreateTable -> rewriteCreateTable(node) - is CreateIndex -> rewriteCreateIndex(node) - is DropTable -> rewriteDropTable(node) - is DropIndex -> rewriteDropIndex(node) + is NAry -> rewriteNAry(node) + is CallAgg -> rewriteCallAgg(node) + is Typed -> rewriteTyped(node) + is Path -> rewritePath(node) + is SimpleCase -> rewriteSimpleCase(node) + is SearchedCase -> rewriteSearchedCase(node) + is Struct -> rewriteStruct(node) + is Seq -> rewriteSeq(node) + is Select -> rewriteSelect(node) + is Parameter -> rewriteParameter(node) + is DataManipulation -> rewriteDataManipulation(node) + is CreateTable -> rewriteCreateTable(node) + is CreateIndex -> rewriteCreateIndex(node) + is DropTable -> rewriteDropTable(node) + is DropIndex -> rewriteDropIndex(node) is NullIf -> rewriteNullIf(node) is Coalesce -> rewriteCoalesce(node) - is Exec -> rewriteExec(node) + is Exec -> rewriteExec(node) is DateLiteral -> rewriteDate(node) is TimeLiteral -> rewriteTime(node) } @@ -141,30 +141,36 @@ open class AstRewriterBase : AstRewriter { id = node.id, case = node.case, scopeQualifier = node.scopeQualifier, - metas = rewriteMetas(node)) + metas = rewriteMetas(node) + ) open fun rewriteSeq(node: Seq): ExprNode = - Seq(rewriteSeqType(node.type), + Seq( + rewriteSeqType(node.type), node.values.map { rewriteExprNode(it) }, - rewriteMetas(node)) + rewriteMetas(node) + ) open fun rewriteSeqType(type: SeqType): SeqType = type open fun rewriteStruct(node: Struct): ExprNode = Struct( node.fields.mapIndexed { index, field -> rewriteStructField(field, index) }, - rewriteMetas(node)) + rewriteMetas(node) + ) open fun rewriteStructField(field: StructField, index: Int): StructField = - StructField( - rewriteExprNode(field.name), - rewriteExprNode(field.expr)) + StructField( + rewriteExprNode(field.name), + rewriteExprNode(field.expr) + ) open fun rewriteSearchedCase(node: SearchedCase): ExprNode { return SearchedCase( node.whenClauses.map { rewriteSearchedCaseWhen(it) }, node.elseExpr?.let { rewriteExprNode(it) }, - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewriteSimpleCase(node: SimpleCase): ExprNode { @@ -172,14 +178,16 @@ open class AstRewriterBase : AstRewriter { rewriteExprNode(node.valueExpr), node.whenClauses.map { rewriteSimpleCaseWhen(it) }, node.elseExpr?.let { rewriteExprNode(it) }, - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewritePath(node: Path): ExprNode { return Path( 
rewriteExprNode(node.root), node.components.map { rewritePathComponent(it) }, - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewriteTyped(node: Typed): ExprNode { @@ -187,7 +195,8 @@ open class AstRewriterBase : AstRewriter { node.op, rewriteExprNode(node.expr), rewriteDataType(node.type), - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewriteCallAgg(node: CallAgg): ExprNode { @@ -195,14 +204,16 @@ open class AstRewriterBase : AstRewriter { rewriteExprNode(node.funcExpr), node.setQuantifier, rewriteExprNode(node.arg), - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewriteNAry(node: NAry): ExprNode { return NAry( node.op, node.args.map { rewriteExprNode(it) }, - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewriteSelect(selectExpr: Select): ExprNode = @@ -212,13 +223,15 @@ open class AstRewriterBase : AstRewriter { return NullIf( rewriteExprNode(node.expr1), rewriteExprNode(node.expr2), - rewriteMetas(node)) + rewriteMetas(node) + ) } open fun rewriteCoalesce(node: Coalesce): ExprNode { return Coalesce( node.args.map { rewriteExprNode(it) }, - rewriteMetas(node)) + rewriteMetas(node) + ) } /** @@ -262,10 +275,11 @@ open class AstRewriterBase : AstRewriter { orderBy = orderBy, limit = limit, offset = offset, - metas = metas) + metas = metas + ) } - open fun rewriteSelectWhere(node: ExprNode):ExprNode = rewriteExprNode(node) + open fun rewriteSelectWhere(node: ExprNode): ExprNode = rewriteExprNode(node) open fun rewriteSelectHaving(node: ExprNode): ExprNode = rewriteExprNode(node) @@ -284,280 +298,303 @@ open class AstRewriterBase : AstRewriter { open fun rewriteSelectProjectionList(projection: SelectProjectionList): SelectProjection = SelectProjectionList( - projection.items.map { it -> rewriteSelectListItem(it) }, rewriteMetas(projection)) - - open fun rewriteSelectProjectionValue(projection: SelectProjectionValue): SelectProjection = - SelectProjectionValue(rewriteExprNode(projection.expr), rewriteMetas(projection)) - - - open fun rewriteSelectProjectionPivot(projection: SelectProjectionPivot): SelectProjection = - SelectProjectionPivot( - rewriteExprNode(projection.nameExpr), - rewriteExprNode(projection.valueExpr), - rewriteMetas(projection)) - - open fun rewriteSelectListItem(item: SelectListItem): SelectListItem = - when(item) { - is SelectListItemStar -> rewriteSelectListItemStar(item) - is SelectListItemExpr -> rewriteSelectListItemExpr(item) - is SelectListItemProjectAll -> rewriteSelectListItemProjectAll(item) - } - - open fun rewriteSelectListItemProjectAll(item: SelectListItemProjectAll): SelectListItem = - SelectListItemProjectAll( - rewriteExprNode(item.expr)) - - open fun rewriteSelectListItemExpr(item: SelectListItemExpr): SelectListItem = - SelectListItemExpr( - rewriteExprNode(item.expr), - item.asName?.let { rewriteSymbolicName(it) }) - - open fun rewriteSelectListItemStar(item: SelectListItemStar): SelectListItem = - SelectListItemStar(rewriteMetas(item)) - - open fun rewritePathComponent(pathComponent: PathComponent): PathComponent = - when(pathComponent) { - is PathComponentUnpivot -> rewritePathComponentUnpivot(pathComponent) - is PathComponentWildcard -> rewritePathComponentWildcard(pathComponent) - is PathComponentExpr -> rewritePathComponentExpr(pathComponent) + projection.items.map { it -> rewriteSelectListItem(it) }, rewriteMetas(projection) + ) + + open fun rewriteSelectProjectionValue(projection: SelectProjectionValue): SelectProjection = + SelectProjectionValue(rewriteExprNode(projection.expr), 
rewriteMetas(projection)) + + open fun rewriteSelectProjectionPivot(projection: SelectProjectionPivot): SelectProjection = + SelectProjectionPivot( + rewriteExprNode(projection.nameExpr), + rewriteExprNode(projection.valueExpr), + rewriteMetas(projection) + ) + + open fun rewriteSelectListItem(item: SelectListItem): SelectListItem = + when (item) { + is SelectListItemStar -> rewriteSelectListItemStar(item) + is SelectListItemExpr -> rewriteSelectListItemExpr(item) + is SelectListItemProjectAll -> rewriteSelectListItemProjectAll(item) + } + + open fun rewriteSelectListItemProjectAll(item: SelectListItemProjectAll): SelectListItem = + SelectListItemProjectAll( + rewriteExprNode(item.expr) + ) + + open fun rewriteSelectListItemExpr(item: SelectListItemExpr): SelectListItem = + SelectListItemExpr( + rewriteExprNode(item.expr), + item.asName?.let { rewriteSymbolicName(it) } + ) + + open fun rewriteSelectListItemStar(item: SelectListItemStar): SelectListItem = + SelectListItemStar(rewriteMetas(item)) + + open fun rewritePathComponent(pathComponent: PathComponent): PathComponent = + when (pathComponent) { + is PathComponentUnpivot -> rewritePathComponentUnpivot(pathComponent) + is PathComponentWildcard -> rewritePathComponentWildcard(pathComponent) + is PathComponentExpr -> rewritePathComponentExpr(pathComponent) + } + + open fun rewritePathComponentUnpivot(pathComponent: PathComponentUnpivot): PathComponent = + PathComponentUnpivot(rewriteMetas(pathComponent)) + + open fun rewritePathComponentWildcard(pathComponent: PathComponentWildcard): PathComponent = + PathComponentWildcard(rewriteMetas(pathComponent)) + + open fun rewritePathComponentExpr(pathComponent: PathComponentExpr): PathComponent = + PathComponentExpr( + rewriteExprNode(pathComponent.expr), + pathComponent.case, + rewriteMetas(pathComponent) + ) + + open fun rewriteFromSource(fromSource: FromSource): FromSource = + when (fromSource) { + is FromSourceJoin -> rewriteFromSourceJoin(fromSource) + is FromSourceLet -> rewriteFromSourceLet(fromSource) + } + + open fun rewriteFromSourceLet(fromSourceLet: FromSourceLet): FromSourceLet = + when (fromSourceLet) { + is FromSourceExpr -> rewriteFromSourceExpr(fromSourceLet) + is FromSourceUnpivot -> rewriteFromSourceUnpivot(fromSourceLet) + } + + open fun rewriteLetVariables(variables: LetVariables) = + LetVariables( + variables.asName?.let { rewriteSymbolicName(it) }, + variables.atName?.let { rewriteSymbolicName(it) }, + variables.byName?.let { rewriteSymbolicName(it) } + ) + + open fun rewriteLetSource(letSource: LetSource) = + LetSource(letSource.bindings.map { rewriteLetBinding(it) }) + + open fun rewriteLetBinding(letBinding: LetBinding): LetBinding = + LetBinding(rewriteExprNode(letBinding.expr), rewriteSymbolicName(letBinding.name)) + + /** + * This is called by the methods responsible for rewriting instances of the [FromSourceLet] + * to rewrite their expression. This exists to provide a place for derived rewriters to + * affect state changes that apply *only* to the expression of these two node types. 
+ */ + open fun rewriteFromSourceValueExpr(expr: ExprNode): ExprNode = + rewriteExprNode(expr) + + open fun rewriteFromSourceUnpivot(fromSource: FromSourceUnpivot): FromSourceLet = + FromSourceUnpivot( + rewriteFromSourceValueExpr(fromSource.expr), + rewriteLetVariables(fromSource.variables), + rewriteMetas(fromSource) + ) + + open fun rewriteFromSourceExpr(fromSource: FromSourceExpr): FromSourceLet = + FromSourceExpr( + rewriteFromSourceValueExpr(fromSource.expr), + rewriteLetVariables(fromSource.variables) + ) + + open fun rewriteFromSourceJoin(fromSource: FromSourceJoin): FromSource = + FromSourceJoin( + fromSource.joinOp, + rewriteFromSource(fromSource.leftRef), + rewriteFromSource(fromSource.rightRef), + rewriteExprNode(fromSource.condition), + rewriteMetas(fromSource) + ) + + open fun rewriteGroupBy(groupBy: GroupBy): GroupBy = + GroupBy( + groupBy.grouping, + groupBy.groupByItems.map { rewriteGroupByItem(it) }, + groupBy.groupName?.let { rewriteSymbolicName(it) } + ) + + open fun rewriteGroupByItem(item: GroupByItem): GroupByItem = + GroupByItem( + rewriteExprNode(item.expr), + item.asName?.let { rewriteSymbolicName(it) } + ) + + open fun rewriteOrderBy(orderBy: OrderBy): OrderBy = + OrderBy( + orderBy.sortSpecItems.map { rewriteSortSpec(it) } + ) + + open fun rewriteSortSpec(sortSpec: SortSpec): SortSpec = + SortSpec( + rewriteExprNode(sortSpec.expr), + sortSpec.orderingSpec + ) + + open fun rewriteDataType(dataType: DataType) = dataType + + open fun rewriteSimpleCaseWhen(case: SimpleCaseWhen): SimpleCaseWhen = + SimpleCaseWhen( + rewriteExprNode(case.valueExpr), + rewriteExprNode(case.thenExpr) + ) + + open fun rewriteSearchedCaseWhen(case: SearchedCaseWhen): SearchedCaseWhen = + SearchedCaseWhen( + rewriteExprNode(case.condition), + rewriteExprNode(case.thenExpr) + ) + + open fun rewriteSymbolicName(symbolicName: SymbolicName): SymbolicName = + SymbolicName( + symbolicName.name, + rewriteMetas(symbolicName) + ) + + open fun rewriteParameter(node: Parameter): Parameter = + Parameter(node.position, rewriteMetas(node)) + + open fun rewriteDataManipulation(node: DataManipulation): DataManipulation = + innerRewriteDataManipulation(node) + + /** + * Many subtypes of [AstRewriterBase] need to override [rewriteDataManipulation] to selectively apply a + * different nested instance of themselves to [DataManipulation] nodes. These subtypes can invoke this method + * instead of [rewriteDataManipulation] to avoid infinite recursion. They can also override this function if + * they need to customize how the new [DataManipulation] node is instantiated. + * + * The traversal order is in the semantic order--that is: + * + * 1. `FROM` ([DataManipulation.from]) + * 2. `WHERE` ([DataManipulation.where]) + * 3. The DML operation ([DataManipulation.dmlOperation]]] + * 4. The metas. 
([DataManipulation.metas]) + */ + open fun innerRewriteDataManipulation(node: DataManipulation): DataManipulation { + val from = node.from?.let { rewriteFromSource(it) } + val where = node.where?.let { rewriteDataManipulationWhere(it) } + val returning = node.returning?.let { rewriteReturningExpr(it) } + val dmlOperations = rewriteDataManipulationOperations(node.dmlOperations) + val metas = rewriteMetas(node) + + return DataManipulation( + dmlOperations, + from, + where, + returning, + metas + ) } - open fun rewritePathComponentUnpivot(pathComponent: PathComponentUnpivot): PathComponent = - PathComponentUnpivot(rewriteMetas(pathComponent)) - - open fun rewritePathComponentWildcard(pathComponent: PathComponentWildcard): PathComponent = - PathComponentWildcard(rewriteMetas(pathComponent)) - - open fun rewritePathComponentExpr(pathComponent: PathComponentExpr): PathComponent = - PathComponentExpr( - rewriteExprNode(pathComponent.expr), - pathComponent.case, - rewriteMetas(pathComponent)) - - open fun rewriteFromSource(fromSource: FromSource): FromSource = - when(fromSource) { - is FromSourceJoin -> rewriteFromSourceJoin(fromSource) - is FromSourceLet -> rewriteFromSourceLet(fromSource) - } - - open fun rewriteFromSourceLet(fromSourceLet: FromSourceLet): FromSourceLet = - when(fromSourceLet) { - is FromSourceExpr -> rewriteFromSourceExpr(fromSourceLet) - is FromSourceUnpivot -> rewriteFromSourceUnpivot(fromSourceLet) - } - - open fun rewriteLetVariables(variables: LetVariables) = - LetVariables( - variables.asName?.let { rewriteSymbolicName(it) }, - variables.atName?.let { rewriteSymbolicName(it) }, - variables.byName?.let { rewriteSymbolicName(it) }) - - open fun rewriteLetSource(letSource: LetSource) = - LetSource(letSource.bindings.map { rewriteLetBinding(it) }) - - open fun rewriteLetBinding(letBinding: LetBinding): LetBinding = - LetBinding(rewriteExprNode(letBinding.expr), rewriteSymbolicName(letBinding.name)) - - /** - * This is called by the methods responsible for rewriting instances of the [FromSourceLet] - * to rewrite their expression. This exists to provide a place for derived rewriters to - * affect state changes that apply *only* to the expression of these two node types. 
- */ - open fun rewriteFromSourceValueExpr(expr: ExprNode): ExprNode = - rewriteExprNode(expr) - - open fun rewriteFromSourceUnpivot(fromSource: FromSourceUnpivot): FromSourceLet = - FromSourceUnpivot( - rewriteFromSourceValueExpr(fromSource.expr), - rewriteLetVariables(fromSource.variables), - rewriteMetas(fromSource)) - - open fun rewriteFromSourceExpr(fromSource: FromSourceExpr): FromSourceLet = - FromSourceExpr( - rewriteFromSourceValueExpr(fromSource.expr), - rewriteLetVariables(fromSource.variables)) - - open fun rewriteFromSourceJoin(fromSource: FromSourceJoin): FromSource = - FromSourceJoin( - fromSource.joinOp, - rewriteFromSource(fromSource.leftRef), - rewriteFromSource(fromSource.rightRef), - rewriteExprNode(fromSource.condition), - rewriteMetas(fromSource)) - - open fun rewriteGroupBy(groupBy: GroupBy): GroupBy = - GroupBy( - groupBy.grouping, - groupBy.groupByItems.map { rewriteGroupByItem(it)}, - groupBy.groupName?.let { rewriteSymbolicName(it)}) - - open fun rewriteGroupByItem(item: GroupByItem): GroupByItem = - GroupByItem( - rewriteExprNode(item.expr), - item.asName?.let { rewriteSymbolicName(it) } ) - - open fun rewriteOrderBy(orderBy: OrderBy): OrderBy = - OrderBy( - orderBy.sortSpecItems.map { rewriteSortSpec(it) }) - - open fun rewriteSortSpec(sortSpec: SortSpec): SortSpec = - SortSpec( - rewriteExprNode(sortSpec.expr), - sortSpec.orderingSpec - ) - - open fun rewriteDataType(dataType: DataType) = dataType - - open fun rewriteSimpleCaseWhen(case: SimpleCaseWhen): SimpleCaseWhen = - SimpleCaseWhen( - rewriteExprNode(case.valueExpr), - rewriteExprNode(case.thenExpr)) - - open fun rewriteSearchedCaseWhen(case: SearchedCaseWhen): SearchedCaseWhen = - SearchedCaseWhen( - rewriteExprNode(case.condition), - rewriteExprNode(case.thenExpr)) - - open fun rewriteSymbolicName(symbolicName: SymbolicName): SymbolicName = - SymbolicName( - symbolicName.name, - rewriteMetas(symbolicName)) - - open fun rewriteParameter(node: Parameter): Parameter = - Parameter(node.position, rewriteMetas(node)) - - open fun rewriteDataManipulation(node: DataManipulation): DataManipulation = - innerRewriteDataManipulation(node) - - /** - * Many subtypes of [AstRewriterBase] need to override [rewriteDataManipulation] to selectively apply a - * different nested instance of themselves to [DataManipulation] nodes. These subtypes can invoke this method - * instead of [rewriteDataManipulation] to avoid infinite recursion. They can also override this function if - * they need to customize how the new [DataManipulation] node is instantiated. - * - * The traversal order is in the semantic order--that is: - * - * 1. `FROM` ([DataManipulation.from]) - * 2. `WHERE` ([DataManipulation.where]) - * 3. The DML operation ([DataManipulation.dmlOperation]]] - * 4. The metas. 
([DataManipulation.metas]) - */ - open fun innerRewriteDataManipulation(node: DataManipulation): DataManipulation { - val from = node.from?.let { rewriteFromSource(it) } - val where = node.where?.let { rewriteDataManipulationWhere(it) } - val returning = node.returning?.let { rewriteReturningExpr(it) } - val dmlOperations = rewriteDataManipulationOperations(node.dmlOperations) - val metas = rewriteMetas(node) - - return DataManipulation( - dmlOperations, - from, - where, - returning, - metas) - } - - open fun rewriteDataManipulationWhere(node: ExprNode):ExprNode = rewriteExprNode(node) - - open fun rewriteReturningExpr(returningExpr: ReturningExpr): ReturningExpr = - ReturningExpr( - returningExpr.returningElems.map { rewriteReturningElem(it) }) - - open fun rewriteReturningElem(returningElem: ReturningElem): ReturningElem = - ReturningElem( - returningElem.returningMapping, - returningElem.columnComponent) - - open fun rewriteDataManipulationOperations(node: DmlOpList) : DmlOpList = - DmlOpList(node.ops.map { rewriteDataManipulationOperation(it) }) - - open fun rewriteDataManipulationOperation(node: DataManipulationOperation): DataManipulationOperation = - when(node) { - is InsertOp -> rewriteDataManipulationOperationInsertOp(node) - is InsertValueOp -> rewriteDataManipulationOperationInsertValueOp(node) - is AssignmentOp -> rewriteDataManipulationOperationAssignmentOp(node) - is RemoveOp -> rewriteDataManipulationOperationRemoveOp(node) - is DeleteOp -> rewriteDataManipulationOperationDeleteOp() + open fun rewriteDataManipulationWhere(node: ExprNode): ExprNode = rewriteExprNode(node) + + open fun rewriteReturningExpr(returningExpr: ReturningExpr): ReturningExpr = + ReturningExpr( + returningExpr.returningElems.map { rewriteReturningElem(it) } + ) + + open fun rewriteReturningElem(returningElem: ReturningElem): ReturningElem = + ReturningElem( + returningElem.returningMapping, + returningElem.columnComponent + ) + + open fun rewriteDataManipulationOperations(node: DmlOpList): DmlOpList = + DmlOpList(node.ops.map { rewriteDataManipulationOperation(it) }) + + open fun rewriteDataManipulationOperation(node: DataManipulationOperation): DataManipulationOperation = + when (node) { + is InsertOp -> rewriteDataManipulationOperationInsertOp(node) + is InsertValueOp -> rewriteDataManipulationOperationInsertValueOp(node) + is AssignmentOp -> rewriteDataManipulationOperationAssignmentOp(node) + is RemoveOp -> rewriteDataManipulationOperationRemoveOp(node) + is DeleteOp -> rewriteDataManipulationOperationDeleteOp() + } + + open fun rewriteDataManipulationOperationInsertOp(node: InsertOp): DataManipulationOperation = + InsertOp( + rewriteExprNode(node.lvalue), + rewriteExprNode(node.values) + ) + + open fun rewriteDataManipulationOperationInsertValueOp(node: InsertValueOp): DataManipulationOperation = + InsertValueOp( + rewriteExprNode(node.lvalue), + rewriteExprNode(node.value), + node.position?.let { rewriteExprNode(it) }, + node.onConflict?.let { rewriteOnConflict(it) } + ) + + fun rewriteOnConflict(node: OnConflict): OnConflict { + return OnConflict(rewriteExprNode(node.condition), node.conflictAction) } - open fun rewriteDataManipulationOperationInsertOp(node: InsertOp): DataManipulationOperation = - InsertOp( - rewriteExprNode(node.lvalue), - rewriteExprNode(node.values) - ) - - open fun rewriteDataManipulationOperationInsertValueOp(node: InsertValueOp): DataManipulationOperation = - InsertValueOp( - rewriteExprNode(node.lvalue), - rewriteExprNode(node.value), - node.position?.let { 
rewriteExprNode(it) }, - node.onConflict?.let { rewriteOnConflict(it) }) - - fun rewriteOnConflict(node: OnConflict) : OnConflict { - return OnConflict(rewriteExprNode(node.condition), node.conflictAction) + open fun rewriteDataManipulationOperationAssignmentOp(node: AssignmentOp): DataManipulationOperation = + AssignmentOp(rewriteAssignment(node.assignment)) + + open fun rewriteDataManipulationOperationRemoveOp(node: RemoveOp): DataManipulationOperation = + RemoveOp(rewriteExprNode(node.lvalue)) + + open fun rewriteDataManipulationOperationDeleteOp(): DataManipulationOperation = DeleteOp + + open fun rewriteAssignment(node: Assignment): Assignment = + Assignment( + rewriteExprNode(node.lvalue), + rewriteExprNode(node.rvalue) + ) + + open fun rewriteCreateTable(node: CreateTable): CreateTable = + CreateTable(node.tableName, rewriteMetas(node)) + + open fun rewriteCreateIndex(node: CreateIndex): CreateIndex = + CreateIndex( + rewriteIdentifier(node.tableId), + node.keys.map { rewriteExprNode(it) }, + rewriteMetas(node) + ) + + open fun rewriteDropTable(node: DropTable): DropTable = + DropTable(rewriteIdentifier(node.tableId), rewriteMetas(node)) + + open fun rewriteDropIndex(node: DropIndex): DropIndex = + DropIndex( + rewriteIdentifier(node.tableId), + rewriteIdentifier(node.indexId), + rewriteMetas(node) + ) + + open fun rewriteExec(node: Exec): Exec = + Exec( + rewriteSymbolicName(node.procedureName), + node.args.map { rewriteExprNode(it) }, + rewriteMetas(node) + ) + + open fun rewriteDate(node: DateLiteral): DateLiteral = + DateLiteral( + node.year, + node.month, + node.day, + rewriteMetas(node) + ) + + open fun rewriteTime(node: TimeLiteral): TimeLiteral = + TimeLiteral( + node.hour, + node.minute, + node.second, + node.nano, + node.precision, + node.with_time_zone, + node.tz_minutes, + rewriteMetas(node) + ) + + open fun rewriteIdentifier(identifier: Identifier): Identifier = + Identifier( + identifier.id, + identifier.case, + rewriteMetas(identifier) + ) } - - open fun rewriteDataManipulationOperationAssignmentOp(node: AssignmentOp): DataManipulationOperation = - AssignmentOp(rewriteAssignment(node.assignment) ) - - open fun rewriteDataManipulationOperationRemoveOp(node: RemoveOp): DataManipulationOperation = - RemoveOp(rewriteExprNode(node.lvalue)) - - open fun rewriteDataManipulationOperationDeleteOp(): DataManipulationOperation = DeleteOp - - open fun rewriteAssignment(node: Assignment): Assignment = - Assignment( - rewriteExprNode(node.lvalue), - rewriteExprNode(node.rvalue)) - - open fun rewriteCreateTable(node: CreateTable): CreateTable = - CreateTable(node.tableName, rewriteMetas(node)) - - open fun rewriteCreateIndex(node: CreateIndex): CreateIndex = - CreateIndex( - rewriteIdentifier(node.tableId), - node.keys.map { rewriteExprNode(it) }, - rewriteMetas(node)) - - open fun rewriteDropTable(node: DropTable): DropTable = - DropTable(rewriteIdentifier(node.tableId), rewriteMetas(node)) - - open fun rewriteDropIndex(node: DropIndex): DropIndex = - DropIndex( - rewriteIdentifier(node.tableId), - rewriteIdentifier(node.indexId), - rewriteMetas(node)) - - open fun rewriteExec(node: Exec): Exec = - Exec( - rewriteSymbolicName(node.procedureName), - node.args.map { rewriteExprNode(it) }, - rewriteMetas(node)) - - open fun rewriteDate(node: DateLiteral): DateLiteral = - DateLiteral( - node.year, - node.month, - node.day, - rewriteMetas(node) - ) - - open fun rewriteTime(node: TimeLiteral): TimeLiteral = - TimeLiteral( - node.hour, - node.minute, - node.second, - node.nano, - 
node.precision, - node.with_time_zone, - node.tz_minutes, - rewriteMetas(node) - ) - - open fun rewriteIdentifier(identifier: Identifier): Identifier = - Identifier( - identifier.id, - identifier.case, - rewriteMetas(identifier) - ) -} + \ No newline at end of file diff --git a/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt b/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt index e5d2f19645..cc42b28592 100644 --- a/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt +++ b/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt @@ -148,4 +148,4 @@ open class AstVisitorBase : AstVisitor { override fun visitOnConflict(onConflict: OnConflict) { // Default implementation does nothing. } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/ast/passes/AstWalker.kt b/lang/src/org/partiql/lang/ast/passes/AstWalker.kt index 45f8ae345a..74de3739a8 100644 --- a/lang/src/org/partiql/lang/ast/passes/AstWalker.kt +++ b/lang/src/org/partiql/lang/ast/passes/AstWalker.kt @@ -184,7 +184,7 @@ open class AstWalker(private val visitor: AstVisitor) { walkExprNode(re.columnComponent.column) } is ReturningWildcard -> case { - //Leaf nodes have no children to walk. + // Leaf nodes have no children to walk. } } } @@ -210,7 +210,7 @@ open class AstWalker(private val visitor: AstVisitor) { when (it) { is PathComponentUnpivot, is PathComponentWildcard -> case { - //Leaf nodes have no children to walk. + // Leaf nodes have no children to walk. } is PathComponentExpr -> case { val (exp) = it @@ -257,7 +257,7 @@ open class AstWalker(private val visitor: AstVisitor) { visitor.visitSelectListItem(it) when (it) { is SelectListItemStar -> case { - //Leaf nodes have no children to walk. + // Leaf nodes have no children to walk. } is SelectListItemExpr -> case { walkExprNode(it.expr) diff --git a/lang/src/org/partiql/lang/ast/passes/SemanticException.kt b/lang/src/org/partiql/lang/ast/passes/SemanticException.kt index e5fc58fe95..7996aa9a83 100644 --- a/lang/src/org/partiql/lang/ast/passes/SemanticException.kt +++ b/lang/src/org/partiql/lang/ast/passes/SemanticException.kt @@ -28,7 +28,8 @@ class SemanticException( message: String = "", errorCode: ErrorCode, errorContext: PropertyValueMap?, - cause: Throwable? = null) : SqlException(message, errorCode, errorContext, cause) { + cause: Throwable? = null +) : SqlException(message, errorCode, errorContext, cause) { /** * Alternate constructor using a [Problem]. Error message is generated using [ProblemDetails.message]. diff --git a/lang/src/org/partiql/lang/ast/passes/SemanticProblemDetails.kt b/lang/src/org/partiql/lang/ast/passes/SemanticProblemDetails.kt index c5f41ffe02..8addbc2792 100644 --- a/lang/src/org/partiql/lang/ast/passes/SemanticProblemDetails.kt +++ b/lang/src/org/partiql/lang/ast/passes/SemanticProblemDetails.kt @@ -9,7 +9,7 @@ import org.partiql.lang.types.StaticType * Variants of [SemanticProblemDetails] contain info about various problems that can be encountered through semantic * passes. */ -sealed class SemanticProblemDetails(override val severity: ProblemSeverity, val messageFormatter: () -> String): ProblemDetails { +sealed class SemanticProblemDetails(override val severity: ProblemSeverity, val messageFormatter: () -> String) : ProblemDetails { override val message: String get() = messageFormatter() @@ -18,7 +18,7 @@ sealed class SemanticProblemDetails(override val severity: ProblemSeverity, val severity = ProblemSeverity.ERROR, messageFormatter = { "Incorrect number of arguments for '$functionName'. 
" + - "Expected $expectedArity but was supplied $actualArity." + "Expected $expectedArity but was supplied $actualArity." } ) @@ -61,7 +61,9 @@ sealed class SemanticProblemDetails(override val severity: ProblemSeverity, val data class NullOrMissingFunctionArgument(val functionName: String) : SemanticProblemDetails( severity = ProblemSeverity.ERROR, - messageFormatter = { "Function $functionName given an argument that will always be null or missing. " + - "As a result, this function call will always return null or missing." } + messageFormatter = { + "Function $functionName given an argument that will always be null or missing. " + + "As a result, this function call will always return null or missing." + } ) } diff --git a/lang/src/org/partiql/lang/ast/passes/StatementRedactor.kt b/lang/src/org/partiql/lang/ast/passes/StatementRedactor.kt index 2c5830e436..381becbd86 100644 --- a/lang/src/org/partiql/lang/ast/passes/StatementRedactor.kt +++ b/lang/src/org/partiql/lang/ast/passes/StatementRedactor.kt @@ -66,9 +66,11 @@ fun skipRedaction(node: PartiqlAst.Expr, safeFieldNames: Set): Boolean { * [userDefinedFunctionRedactionConfig] is an optional mapping of UDF names to functions determining which call * arguments are to be redacted. For an example, please check StatementRedactorTest.kt for more details. */ -fun redact(statement: String, - providedSafeFieldNames: Set = emptySet(), - userDefinedFunctionRedactionConfig: Map = emptyMap()): String { +fun redact( + statement: String, + providedSafeFieldNames: Set = emptySet(), + userDefinedFunctionRedactionConfig: Map = emptyMap() +): String { return redact(statement, parser.parseAstStatement(statement), providedSafeFieldNames, userDefinedFunctionRedactionConfig) } @@ -85,10 +87,12 @@ fun redact(statement: String, * [userDefinedFunctionRedactionConfig] is an optional mapping of UDF names to functions determining which call * arguments are to be redacted. For an example, please check StatementRedactorTest.kt for more details. 
*/ -fun redact(statement: String, - partiqlAst: PartiqlAst.Statement, - providedSafeFieldNames: Set = emptySet(), - userDefinedFunctionRedactionConfig: Map = emptyMap()): String { +fun redact( + statement: String, + partiqlAst: PartiqlAst.Statement, + providedSafeFieldNames: Set = emptySet(), + userDefinedFunctionRedactionConfig: Map = emptyMap() +): String { val statementRedactionVisitor = StatementRedactionVisitor(statement, providedSafeFieldNames, userDefinedFunctionRedactionConfig) statementRedactionVisitor.walkStatement(partiqlAst) @@ -160,9 +164,7 @@ private class StatementRedactionVisitor( private fun redactExpr(node: PartiqlAst.Expr) { if (node.isNAry()) { redactNAry(node) - } - - else when (node) { + } else when (node) { is PartiqlAst.Expr.Lit -> redactLiteral(node) is PartiqlAst.Expr.List -> redactSeq(node) is PartiqlAst.Expr.Sexp -> redactSeq(node) @@ -290,8 +292,7 @@ private class StatementRedactionVisitor( is PartiqlAst.Expr.Lit -> if (!skipRedaction(it.first, safeFieldNames)) { redactExpr(it.second) - } - else { /* intentionally blank */ } + } else { /* intentionally blank */ } } } } @@ -300,26 +301,25 @@ private class StatementRedactionVisitor( // refactored // TODO: other NAry ops that not modeled (LIKE, INTERSECT, INTERSECT_ALL, EXCEPT, EXCEPT_ALL, UNION, UNION_ALL) private fun PartiqlAst.Expr.isNAry(): Boolean { - return this is PartiqlAst.Expr.And - || this is PartiqlAst.Expr.Or - || this is PartiqlAst.Expr.Not - || this is PartiqlAst.Expr.Eq - || this is PartiqlAst.Expr.Ne - || this is PartiqlAst.Expr.Gt - || this is PartiqlAst.Expr.Gte - || this is PartiqlAst.Expr.Lt - || this is PartiqlAst.Expr.Lte - || this is PartiqlAst.Expr.InCollection - || this is PartiqlAst.Expr.Pos - || this is PartiqlAst.Expr.Neg - || this is PartiqlAst.Expr.Plus - || this is PartiqlAst.Expr.Minus - || this is PartiqlAst.Expr.Times - || this is PartiqlAst.Expr.Divide - || this is PartiqlAst.Expr.Modulo - || this is PartiqlAst.Expr.Concat - || this is PartiqlAst.Expr.Between - || this is PartiqlAst.Expr.Call - + return this is PartiqlAst.Expr.And || + this is PartiqlAst.Expr.Or || + this is PartiqlAst.Expr.Not || + this is PartiqlAst.Expr.Eq || + this is PartiqlAst.Expr.Ne || + this is PartiqlAst.Expr.Gt || + this is PartiqlAst.Expr.Gte || + this is PartiqlAst.Expr.Lt || + this is PartiqlAst.Expr.Lte || + this is PartiqlAst.Expr.InCollection || + this is PartiqlAst.Expr.Pos || + this is PartiqlAst.Expr.Neg || + this is PartiqlAst.Expr.Plus || + this is PartiqlAst.Expr.Minus || + this is PartiqlAst.Expr.Times || + this is PartiqlAst.Expr.Divide || + this is PartiqlAst.Expr.Modulo || + this is PartiqlAst.Expr.Concat || + this is PartiqlAst.Expr.Between || + this is PartiqlAst.Expr.Call } } diff --git a/lang/src/org/partiql/lang/ast/passes/inference/StaticTypeInferencer.kt b/lang/src/org/partiql/lang/ast/passes/inference/StaticTypeInferencer.kt index 3eb71598e2..b1bed3c108 100644 --- a/lang/src/org/partiql/lang/ast/passes/inference/StaticTypeInferencer.kt +++ b/lang/src/org/partiql/lang/ast/passes/inference/StaticTypeInferencer.kt @@ -45,8 +45,9 @@ class StaticTypeInferencer( val inferencer = StaticTypeInferenceVisitorTransform(globalBindings, customFunctionSignatures, customTypedOpParameters, problemCollector) val transformedPartiqlAst = inferencer.transformStatement(node) val inferredStaticType = when (transformedPartiqlAst) { - is PartiqlAst.Statement.Query -> transformedPartiqlAst.expr.metas.staticType?.type - ?: error("Expected query's inferred StaticType to not be null") + is 
PartiqlAst.Statement.Query -> + transformedPartiqlAst.expr.metas.staticType?.type + ?: error("Expected query's inferred StaticType to not be null") is PartiqlAst.Statement.Dml, is PartiqlAst.Statement.Ddl, is PartiqlAst.Statement.Exec -> error("Type inference for DML, DDL, EXEC statements is not currently supported") @@ -75,7 +76,7 @@ class StaticTypeInferencer( * @param problems all of the [Problem]s encountered through static type inference, which will all have * [ProblemSeverity.WARNING] */ - data class Success(val staticType: StaticType, override val problems: List): InferenceResult() + data class Success(val staticType: StaticType, override val problems: List) : InferenceResult() /** * Unsuccessful static type inference result due to at least one [Problem] encountered with @@ -89,7 +90,7 @@ class StaticTypeInferencer( * It is used internally for testing the query's type inference behavior after an error is encountered. * @param problems all of the [Problem]s encountered through static type inference. */ - data class Failure(internal val staticType: StaticType, internal val partiqlAst: PartiqlAst.Statement, override val problems: List): InferenceResult() + data class Failure(internal val staticType: StaticType, internal val partiqlAst: PartiqlAst.Statement, override val problems: List) : InferenceResult() } } diff --git a/lang/src/org/partiql/lang/domains/util.kt b/lang/src/org/partiql/lang/domains/util.kt index 3bc3273fdd..713fdc4f54 100644 --- a/lang/src/org/partiql/lang/domains/util.kt +++ b/lang/src/org/partiql/lang/domains/util.kt @@ -14,7 +14,6 @@ import org.partiql.lang.eval.BindingCase fun PartiqlAst.Builder.id(name: String) = id(name, caseInsensitive(), unqualified()) - val MetaContainer.staticType: StaticTypeMeta? get() = this[StaticTypeMeta.TAG] as StaticTypeMeta? /** Constructs a container with the specified metas. */ @@ -55,7 +54,7 @@ fun PropertyValueMap.addSourceLocation(metas: MetaContainer): PropertyValueMap { /** * Converts a [PartiqlAst.CaseSensitivity] to a [BindingCase]. */ -fun PartiqlAst.CaseSensitivity.toBindingCase(): BindingCase = when(this) { +fun PartiqlAst.CaseSensitivity.toBindingCase(): BindingCase = when (this) { is PartiqlAst.CaseSensitivity.CaseInsensitive -> BindingCase.INSENSITIVE is PartiqlAst.CaseSensitivity.CaseSensitive -> BindingCase.SENSITIVE } diff --git a/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt b/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt index 5202e48177..7ae3e8b2e8 100644 --- a/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt +++ b/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt @@ -20,7 +20,6 @@ import java.util.EnumMap internal const val UNKNOWN: String = "" - /** * Categories for errors. Should map to stages in the Compiler and Evaluator. */ @@ -33,7 +32,6 @@ enum class ErrorCategory(val message: String) { override fun toString() = message } - /** Each possible value that can be reported as part of an error has a * [Property]. [Property] is used as a key in [PropertyValueMap]. * @@ -153,7 +151,6 @@ class PropertyValueMap(private val map: EnumMap = EnumM */ operator fun get(key: Property): PropertyValue? 
= map[key] - private fun verifyTypeAndSet(prop: Property, expectedType: PropertyType, value: T, pValue: PropertyValue) { if (prop.propertyType == expectedType) { map[prop] = pValue @@ -177,7 +174,6 @@ class PropertyValueMap(private val map: EnumMap = EnumM verifyTypeAndSet(key, PropertyType.STRING_CLASS, strValue, o) } - /** * Given a `key` and a [Long] value, insert the key-value pair into the [PropertyValueMap]. * @@ -193,7 +189,6 @@ class PropertyValueMap(private val map: EnumMap = EnumM verifyTypeAndSet(key, PropertyType.LONG_CLASS, longValue, o) } - /** * Given a `key` and a [Int] value, insert the key-value pair into the [PropertyValueMap]. * @@ -209,7 +204,6 @@ class PropertyValueMap(private val map: EnumMap = EnumM verifyTypeAndSet(key, PropertyType.INTEGER_CLASS, intValue, o) } - /** * Given a `key` and a [IonValue] value, insert the key-value pair into the [PropertyValueMap]. * @@ -225,7 +219,6 @@ class PropertyValueMap(private val map: EnumMap = EnumM verifyTypeAndSet(key, PropertyType.ION_VALUE_CLASS, ionValue, o) } - /** * Given a `key` and a [TokenType] value, insert the key-value pair into the [PropertyValueMap]. * @@ -241,7 +234,6 @@ class PropertyValueMap(private val map: EnumMap = EnumM verifyTypeAndSet(key, PropertyType.TOKEN_CLASS, tokenTypeValue, o) } - /** * Predicate to check if [property] is already in this [PropertyValueMap] * @@ -250,8 +242,5 @@ class PropertyValueMap(private val map: EnumMap = EnumM */ fun hasProperty(property: Property) = map.containsKey(property) - fun getProperties() = this.map.keys - } - diff --git a/lang/src/org/partiql/lang/errors/ErrorCode.kt b/lang/src/org/partiql/lang/errors/ErrorCode.kt index 8d7d0bf1c7..7ca7bb78fe 100644 --- a/lang/src/org/partiql/lang/errors/ErrorCode.kt +++ b/lang/src/org/partiql/lang/errors/ErrorCode.kt @@ -18,17 +18,15 @@ import org.partiql.lang.eval.ExprValueType import org.partiql.lang.syntax.DATE_TIME_PART_KEYWORDS import org.partiql.lang.syntax.TokenType - /** Property Set constants used in [ErrorCode] */ private val LOCATION = setOf(Property.LINE_NUMBER, Property.COLUMN_NUMBER) private val TOKEN_INFO = setOf(Property.TOKEN_TYPE, Property.TOKEN_VALUE) private val LOC_TOKEN = LOCATION + (TOKEN_INFO) private val LOC_TOKEN_STR = LOCATION + (setOf(Property.TOKEN_STRING)) - /** Helper function to reduce syntactical overhead of accessing property values as strings. */ private fun PropertyValueMap.getAsString(key: Property, defaultValue: String) = - this[key]?.toString() ?: defaultValue + this[key]?.toString() ?: defaultValue enum class ErrorBehaviorInPermissiveMode { THROW_EXCEPTION, RETURN_MISSING @@ -45,21 +43,24 @@ internal const val UNBOUND_QUOTED_IDENTIFIER_HINT = * - If is is RETURN_MISSING, evaluator will return MISSING in the permissive mode. * - in the LEGACY mode, the evaluator always throws exception irrespective of this flag. 
*/ -enum class ErrorCode(internal val category: ErrorCategory, - private val properties: Set, - private val messagePrefix: String, - val errorBehaviorInPermissiveMode: ErrorBehaviorInPermissiveMode = ErrorBehaviorInPermissiveMode.THROW_EXCEPTION) { - +enum class ErrorCode( + internal val category: ErrorCategory, + private val properties: Set, + private val messagePrefix: String, + val errorBehaviorInPermissiveMode: ErrorBehaviorInPermissiveMode = ErrorBehaviorInPermissiveMode.THROW_EXCEPTION +) { INTERNAL_ERROR( ErrorCategory.EVALUATOR, LOCATION, - "internal error"), + "internal error" + ), LEXER_INVALID_CHAR( ErrorCategory.LEXER, LOC_TOKEN_STR, - "invalid character at") { + "invalid character at" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getTokenString(errorContext) }, @@ -67,12 +68,14 @@ enum class ErrorCode(internal val category: ErrorCategory, LEXER_INVALID_NAME( ErrorCategory.LEXER, LOC_TOKEN_STR, - "invalid name"), + "invalid name" + ), LEXER_INVALID_OPERATOR( ErrorCategory.LEXER, LOC_TOKEN_STR, - "invalid operator at") { + "invalid operator at" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getTokenString(errorContext) }, @@ -80,7 +83,8 @@ enum class ErrorCode(internal val category: ErrorCategory, LEXER_INVALID_LITERAL( ErrorCategory.LEXER, LOC_TOKEN_STR, - "invalid literal at") { + "invalid literal at" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getTokenString(errorContext) }, @@ -88,7 +92,8 @@ enum class ErrorCode(internal val category: ErrorCategory, LEXER_INVALID_ION_LITERAL( ErrorCategory.LEXER, LOC_TOKEN_STR, - "invalid ion literal at") { + "invalid ion literal at" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getTokenString(errorContext) }, @@ -96,12 +101,14 @@ enum class ErrorCode(internal val category: ErrorCategory, PARSE_MALFORMED_PARSE_TREE( ErrorCategory.PARSER, LOC_TOKEN, - "Internal error - malformed parse tree detected"), + "Internal error - malformed parse tree detected" + ), PARSE_EXPECTED_KEYWORD( ErrorCategory.PARSER, LOC_TOKEN + setOf(Property.KEYWORD), - "expected keyword") { + "expected keyword" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getKeyword(errorContext) }, @@ -109,7 +116,8 @@ enum class ErrorCode(internal val category: ErrorCategory, PARSE_EXPECTED_TOKEN_TYPE( ErrorCategory.PARSER, LOC_TOKEN + setOf(Property.EXPECTED_TOKEN_TYPE), - "expected token of type") { + "expected token of type" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = errorContext?.get(Property.EXPECTED_TOKEN_TYPE)?.tokenTypeValue()?.toString() ?: UNKNOWN + "found ${getTokenType(errorContext)}" @@ -118,9 +126,10 @@ enum class ErrorCode(internal val category: ErrorCategory, PARSE_EXPECTED_2_TOKEN_TYPES( ErrorCategory.PARSER, LOC_TOKEN + setOf(Property.EXPECTED_TOKEN_TYPE_1_OF_2, Property.EXPECTED_TOKEN_TYPE_2_OF_2), - "unexpected token") { + "unexpected token" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = - "expected ${errorContext?.getAsString(Property.EXPECTED_TOKEN_TYPE_1_OF_2, UNKNOWN)}" + + "expected ${errorContext?.getAsString(Property.EXPECTED_TOKEN_TYPE_1_OF_2, UNKNOWN)}" + " or ${errorContext?.getAsString(Property.EXPECTED_TOKEN_TYPE_2_OF_2, UNKNOWN)}" + " but found ${getTokenType(errorContext)}" }, @@ -128,7 +137,8 @@ enum class ErrorCode(internal val category: ErrorCategory, PARSE_EXPECTED_NUMBER( ErrorCategory.PARSER, 
LOC_TOKEN, - "Expected number, found") { + "Expected number, found" + ) { override fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getTokenValue(errorContext) }, @@ -136,275 +146,324 @@ enum class ErrorCode(internal val category: ErrorCategory, PARSE_EXPECTED_TYPE_NAME( ErrorCategory.PARSER, LOC_TOKEN, - "expected type name, found"), + "expected type name, found" + ), PARSE_EXPECTED_WHEN_CLAUSE( ErrorCategory.PARSER, LOC_TOKEN, - "expected WHEN clause in CASE"), + "expected WHEN clause in CASE" + ), PARSE_EXPECTED_WHERE_CLAUSE( - ErrorCategory.PARSER, - LOC_TOKEN, - "expected WHERE clause"), + ErrorCategory.PARSER, + LOC_TOKEN, + "expected WHERE clause" + ), PARSE_EXPECTED_CONFLICT_ACTION( - ErrorCategory.PARSER, - LOC_TOKEN, - "expected "), + ErrorCategory.PARSER, + LOC_TOKEN, + "expected " + ), PARSE_EXPECTED_RETURNING_CLAUSE( - ErrorCategory.PARSER, - LOC_TOKEN, - "expected "), + ErrorCategory.PARSER, + LOC_TOKEN, + "expected " + ), PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX( - ErrorCategory.PARSER, - LOC_TOKEN, - "unsupported syntax in RETURNING clause"), + ErrorCategory.PARSER, + LOC_TOKEN, + "unsupported syntax in RETURNING clause" + ), PARSE_UNSUPPORTED_TOKEN( ErrorCategory.PARSER, LOC_TOKEN, - "Unexpected token"), + "Unexpected token" + ), PARSE_UNSUPPORTED_LITERALS_GROUPBY( ErrorCategory.PARSER, LOC_TOKEN, - "unsupported literal in GROUP BY"), + "unsupported literal in GROUP BY" + ), PARSE_EXPECTED_MEMBER( ErrorCategory.PARSER, LOC_TOKEN, - "expected MEMBER node"), + "expected MEMBER node" + ), PARSE_EXPECTED_DATE_TIME_PART( ErrorCategory.PARSER, LOC_TOKEN, - "expected one of: [${DATE_TIME_PART_KEYWORDS.joinToString()}]"), + "expected one of: [${DATE_TIME_PART_KEYWORDS.joinToString()}]" + ), PARSE_UNSUPPORTED_SELECT( ErrorCategory.PARSER, LOC_TOKEN, - "unsupported use of SELECT"), + "unsupported use of SELECT" + ), PARSE_UNSUPPORTED_CASE( ErrorCategory.PARSER, LOC_TOKEN, - "unsupported use of CASE"), + "unsupported use of CASE" + ), PARSE_UNSUPPORTED_CASE_CLAUSE( ErrorCategory.PARSER, LOC_TOKEN, - "Unsupported use of CASE statement"), + "Unsupported use of CASE statement" + ), PARSE_UNSUPPORTED_ALIAS( ErrorCategory.PARSER, LOC_TOKEN, - "unsupported syntax for alias, `at` and `as` are supported"), + "unsupported syntax for alias, `at` and `as` are supported" + ), PARSE_UNSUPPORTED_SYNTAX( ErrorCategory.PARSER, LOC_TOKEN, - "unsupported Syntax"), + "unsupported Syntax" + ), PARSE_UNKNOWN_OPERATOR( ErrorCategory.PARSER, LOC_TOKEN, - "unsupported operator"), + "unsupported operator" + ), PARSE_INVALID_PATH_COMPONENT( ErrorCategory.PARSER, LOC_TOKEN + setOf(Property.TOKEN_TYPE, Property.TOKEN_VALUE), - "invalid Path component") { + "invalid Path component" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String { return "Invalid path component, expecting either an ${TokenType.IDENTIFIER} or ${TokenType.STAR}, " + - "got: ${errorContext?.get(Property.TOKEN_TYPE) ?: UNKNOWN} " + - "with value: ${errorContext?.get(Property.TOKEN_VALUE) ?: UNKNOWN}" - + "got: ${errorContext?.get(Property.TOKEN_TYPE) ?: UNKNOWN} " + + "with value: ${errorContext?.get(Property.TOKEN_VALUE) ?: UNKNOWN}" } }, PARSE_MISSING_IDENT_AFTER_AT( ErrorCategory.PARSER, LOC_TOKEN, - "identifier expected after `@` symbol"), + "identifier expected after `@` symbol" + ), PARSE_UNEXPECTED_OPERATOR( ErrorCategory.PARSER, LOC_TOKEN, - "unexpected operator"), + "unexpected operator" + ), PARSE_UNEXPECTED_TERM( ErrorCategory.PARSER, LOC_TOKEN, - "unexpected term found"), + "unexpected term 
found" + ), PARSE_UNEXPECTED_TOKEN( ErrorCategory.PARSER, LOC_TOKEN, - "unexpected token found"), + "unexpected token found" + ), PARSE_UNEXPECTED_KEYWORD( ErrorCategory.PARSER, LOC_TOKEN, - "unexpected keyword found"), + "unexpected keyword found" + ), PARSE_EXPECTED_EXPRESSION( ErrorCategory.PARSER, LOC_TOKEN, - "expected expression"), + "expected expression" + ), PARSE_EXPECTED_LEFT_PAREN_AFTER_CAST( ErrorCategory.PARSER, LOC_TOKEN, - "expected left parenthesis after CAST"), + "expected left parenthesis after CAST" + ), PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR( ErrorCategory.PARSER, LOC_TOKEN, - "expected left parenthesis"), + "expected left parenthesis" + ), PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL( ErrorCategory.PARSER, LOC_TOKEN, - "expected left parenthesis"), + "expected left parenthesis" + ), PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL( ErrorCategory.PARSER, LOC_TOKEN, - "expected right parenthesis"), + "expected right parenthesis" + ), PARSE_EXPECTED_ARGUMENT_DELIMITER( ErrorCategory.PARSER, LOC_TOKEN, - "expected argument delimiter"), + "expected argument delimiter" + ), PARSE_CAST_ARITY( ErrorCategory.PARSER, LOC_TOKEN + setOf(Property.CAST_TO, Property.EXPECTED_ARITY_MIN, Property.EXPECTED_ARITY_MAX), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Cast to type ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN} has incorrect arity." + "Correct arity is ${errorContext?.get(Property.EXPECTED_ARITY_MIN)?.integerValue() ?: UNKNOWN}.." + "${errorContext?.get(Property.EXPECTED_ARITY_MAX)?.integerValue() ?: UNKNOWN}" - }, PARSE_TYPE_PARAMETER_EXCEEDED_MAXIMUM_VALUE( ErrorCategory.PARSER, LOC_TOKEN, - "Type parameter has exceeded the maximum allowed value of ${Int.MAX_VALUE}"), + "Type parameter has exceeded the maximum allowed value of ${Int.MAX_VALUE}" + ), PARSE_INVALID_TYPE_PARAM( ErrorCategory.PARSER, LOC_TOKEN, - "invalid value used for type parameter"), + "invalid value used for type parameter" + ), PARSE_INVALID_PRECISION_FOR_TIME( ErrorCategory.PARSER, LOC_TOKEN, - "invalid precision used for TIME type"), + "invalid precision used for TIME type" + ), PARSE_INVALID_DATE_STRING( ErrorCategory.PARSER, LOC_TOKEN, - "expected date string to be of the format YYYY-MM-DD"), + "expected date string to be of the format YYYY-MM-DD" + ), PARSE_INVALID_TIME_STRING( ErrorCategory.PARSER, LOC_TOKEN, - "expected time string to be of the format HH:MM:SS[.dddd...][+|-HH:MM]"), + "expected time string to be of the format HH:MM:SS[.dddd...][+|-HH:MM]" + ), PARSE_EMPTY_SELECT( ErrorCategory.PARSER, LOC_TOKEN, - "found empty SELECT list"), + "found empty SELECT list" + ), PARSE_SELECT_MISSING_FROM( ErrorCategory.PARSER, LOC_TOKEN, - "missing FROM after SELECT list"), + "missing FROM after SELECT list" + ), PARSE_MISSING_OPERATION( ErrorCategory.PARSER, LOC_TOKEN, - "expected DML or SELECT operation after FROM"), + "expected DML or SELECT operation after FROM" + ), PARSE_MISSING_SET_ASSIGNMENT( ErrorCategory.PARSER, LOC_TOKEN, - "expected assignment for SET"), + "expected assignment for SET" + ), PARSE_EXPECTED_IDENT_FOR_GROUP_NAME( ErrorCategory.PARSER, LOC_TOKEN, - "expected identifier for GROUP name"), + "expected identifier for GROUP name" + ), PARSE_EXPECTED_IDENT_FOR_ALIAS( ErrorCategory.PARSER, LOC_TOKEN, - "expected identifier for alias"), + "expected identifier for alias" + ), PARSE_EXPECTED_AS_FOR_LET( ErrorCategory.PARSER, LOC_TOKEN, - "expected AS for LET clause"), + "expected AS for LET clause" + ), 
PARSE_UNSUPPORTED_CALL_WITH_STAR( ErrorCategory.PARSER, LOC_TOKEN, - "function call, other than COUNT, with (*) as parameter is not supported"), + "function call, other than COUNT, with (*) as parameter is not supported" + ), PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL( ErrorCategory.PARSER, LOC_TOKEN, - "Aggregate function calls take 1 argument only"), + "Aggregate function calls take 1 argument only" + ), PARSE_NO_STORED_PROCEDURE_PROVIDED( ErrorCategory.PARSER, LOC_TOKEN, - "No stored procedure provided"), + "No stored procedure provided" + ), PARSE_MALFORMED_JOIN( ErrorCategory.PARSER, LOC_TOKEN, - "malformed use of FROM with JOIN"), + "malformed use of FROM with JOIN" + ), PARSE_EXPECTED_IDENT_FOR_AT( ErrorCategory.PARSER, LOC_TOKEN, - "expected identifier for AT name"), + "expected identifier for AT name" + ), PARSE_INVALID_CONTEXT_FOR_WILDCARD_IN_SELECT_LIST( ErrorCategory.PARSER, LOC_TOKEN, - "Invalid use of * in select list"), + "Invalid use of * in select list" + ), - //SQB = SQuare Bracket + // SQB = SQuare Bracket PARSE_CANNOT_MIX_SQB_AND_WILDCARD_IN_SELECT_LIST( ErrorCategory.PARSER, LOC_TOKEN, - "Cannot mix [] and * in the same expression in a select list"), + "Cannot mix [] and * in the same expression in a select list" + ), PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST( ErrorCategory.PARSER, LOCATION, - "Other expressions may not be present in the select list when '*' is used without dot notation."), + "Other expressions may not be present in the select list when '*' is used without dot notation." + ), - //Evaluator errors + // Evaluator errors // TODO: replace uses of this with UNIMPLEMENTED_FEATURE EVALUATOR_FEATURE_NOT_SUPPORTED_YET( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.FEATURE_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Feature '${errorContext?.get(Property.FEATURE_NAME)?.stringValue() ?: UNKNOWN}' not supported yet" - }, + }, EVALUATOR_COUNT_DISTINCT_STAR( ErrorCategory.EVALUATOR, LOCATION, - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "COUNT(DISTINCT *) is not supported" }, @@ -413,7 +472,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.BINDING_NAME), "Binding does not exist", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Binding '${errorContext?.get(Property.BINDING_NAME)?.stringValue() ?: UNKNOWN}' does not exist" }, @@ -422,7 +482,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.BINDING_NAME), "Binding does not exist. 
$UNBOUND_QUOTED_IDENTIFIER_HINT", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Binding '${errorContext?.get(Property.BINDING_NAME)?.stringValue() ?: UNKNOWN}' does not exist" }, @@ -430,77 +491,86 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.BINDING_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Variable '${errorContext?.get(Property.BINDING_NAME)?.stringValue() ?: UNKNOWN}' " + - "must appear in the GROUP BY clause or be used in an aggregation function" + "must appear in the GROUP BY clause or be used in an aggregation function" }, EVALUATOR_UNBOUND_PARAMETER( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.EXPECTED_PARAMETER_ORDINAL, Property.BOUND_PARAMETER_COUNT), - "No parameter bound for position!"), + "No parameter bound for position!" + ), EVALUATOR_INVALID_CAST( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.CAST_TO, Property.CAST_FROM), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING){ - override fun getErrorMessage(errorContext: PropertyValueMap?): String = - "Cannot convert ${errorContext?.get(Property.CAST_FROM)?.stringValue() ?: UNKNOWN} " + + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { + override fun getErrorMessage(errorContext: PropertyValueMap?): String = + "Cannot convert ${errorContext?.get(Property.CAST_FROM)?.stringValue() ?: UNKNOWN} " + "to ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN}" - }, + }, EVALUATOR_INVALID_CAST_NO_LOCATION( ErrorCategory.EVALUATOR, setOf(Property.CAST_TO, Property.CAST_FROM), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING){ - override fun getErrorMessage(errorContext: PropertyValueMap?): String = - "Cannot convert ${errorContext?.get(Property.CAST_FROM)?.stringValue() ?: UNKNOWN} " + + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { + override fun getErrorMessage(errorContext: PropertyValueMap?): String = + "Cannot convert ${errorContext?.get(Property.CAST_FROM)?.stringValue() ?: UNKNOWN} " + "to ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN}" - }, + }, EVALUATOR_CAST_FAILED( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.CAST_TO, Property.CAST_FROM), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING){ + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Failed to convert ${errorContext?.get(Property.CAST_FROM)?.stringValue() ?: UNKNOWN} " + - "to ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN}" - }, + "to ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN}" + }, EVALUATOR_CAST_FAILED_NO_LOCATION( ErrorCategory.EVALUATOR, setOf(Property.CAST_TO, Property.CAST_FROM), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING){ + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Failed to convert ${errorContext?.get(Property.CAST_FROM)?.stringValue() ?: UNKNOWN} " + - "to ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN}" - }, + "to ${errorContext?.get(Property.CAST_TO)?.stringValue() ?: UNKNOWN}" + }, EVALUATOR_NO_SUCH_FUNCTION( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.FUNCTION_NAME), - ""){ - override fun getErrorMessage(errorContext: PropertyValueMap?): String = - "No such function: 
${errorContext?.get(Property.FUNCTION_NAME)?.stringValue() ?: UNKNOWN} " - }, + "" + ) { + override fun getErrorMessage(errorContext: PropertyValueMap?): String = + "No such function: ${errorContext?.get(Property.FUNCTION_NAME)?.stringValue() ?: UNKNOWN} " + }, SEMANTIC_DUPLICATE_ALIASES_IN_SELECT_LIST_ITEM( ErrorCategory.SEMANTIC, LOCATION, - ""){ + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Duplicate projection field encountered in SelectListItem expression" - }, + }, SEMANTIC_NO_SUCH_FUNCTION( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.FUNCTION_NAME), - ""){ + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "No such function: ${errorContext?.get(Property.FUNCTION_NAME)?.stringValue() ?: UNKNOWN} " }, @@ -508,45 +578,53 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_NO_SUCH_PROCEDURE( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.PROCEDURE_NAME), - ""){ - override fun getErrorMessage(errorContext: PropertyValueMap?): String = - "No such stored procedure: ${errorContext?.get(Property.PROCEDURE_NAME)?.stringValue() ?: UNKNOWN} " - }, + "" + ) { + override fun getErrorMessage(errorContext: PropertyValueMap?): String = + "No such stored procedure: ${errorContext?.get(Property.PROCEDURE_NAME)?.stringValue() ?: UNKNOWN} " + }, EVALUATOR_INCORRECT_NUMBER_OF_ARGUMENTS_TO_FUNC_CALL( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.FUNCTION_NAME, Property.EXPECTED_ARITY_MIN, Property.EXPECTED_ARITY_MAX, Property.ACTUAL_ARITY), - "Incorrect number of arguments to function call"), + "Incorrect number of arguments to function call" + ), EVALUATOR_INCORRECT_NUMBER_OF_ARGUMENTS_TO_PROCEDURE_CALL( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.EXPECTED_ARITY_MIN, Property.EXPECTED_ARITY_MAX), - "Incorrect number of arguments to procedure call"), + "Incorrect number of arguments to procedure call" + ), EVALUATOR_DATE_FIELD_OUT_OF_RANGE( ErrorCategory.EVALUATOR, LOCATION, - "Date field out of range."), + "Date field out of range." 
+ ), EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_FUNC_CALL( ErrorCategory.EVALUATOR, - LOCATION + setOf(Property.FUNCTION_NAME, - Property.EXPECTED_ARGUMENT_TYPES, - Property.ACTUAL_ARGUMENT_TYPES, - Property.ARGUMENT_POSITION), + LOCATION + setOf( + Property.FUNCTION_NAME, + Property.EXPECTED_ARGUMENT_TYPES, + Property.ACTUAL_ARGUMENT_TYPES, + Property.ARGUMENT_POSITION + ), "Incorrect type of arguments to function call", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Invalid argument type for ${errorContext?.get(Property.FUNCTION_NAME) ?: UNKNOWN} " + - "argument number ${errorContext?.get(Property.ARGUMENT_POSITION) ?: UNKNOWN}, " + - "expected: [${errorContext?.get(Property.EXPECTED_ARGUMENT_TYPES) ?: UNKNOWN}] " + - "got: ${errorContext?.get(Property.ACTUAL_ARGUMENT_TYPES) ?: UNKNOWN}" + "argument number ${errorContext?.get(Property.ARGUMENT_POSITION) ?: UNKNOWN}, " + + "expected: [${errorContext?.get(Property.EXPECTED_ARGUMENT_TYPES) ?: UNKNOWN}] " + + "got: ${errorContext?.get(Property.ACTUAL_ARGUMENT_TYPES) ?: UNKNOWN}" }, SEMANTIC_INCORRECT_ARGUMENT_TYPES_TO_FUNC_CALL( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.EXPECTED_ARGUMENT_TYPES, Property.ACTUAL_ARGUMENT_TYPES, Property.FUNCTION_NAME), - "Incorrect type of arguments to function call") { + "Incorrect type of arguments to function call" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Invalid argument types for ${errorContext?.get(Property.FUNCTION_NAME) ?: UNKNOWN}, " + "expected: ${errorContext?.get(Property.EXPECTED_ARGUMENT_TYPES) ?: UNKNOWN} " + @@ -556,7 +634,8 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_INFERENCER_ERROR( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.MESSAGE), - ""){ + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String { return errorContext?.get(Property.MESSAGE)?.stringValue() ?: UNKNOWN } @@ -565,28 +644,31 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_PROCEDURE_CALL( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.EXPECTED_ARGUMENT_TYPES, Property.ACTUAL_ARGUMENT_TYPES, Property.FUNCTION_NAME), - "Incorrect type of arguments to procedure call") { + "Incorrect type of arguments to procedure call" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Invalid argument types for ${errorContext?.get(Property.FUNCTION_NAME) ?: UNKNOWN}, " + - "expected: ${errorContext?.get(Property.EXPECTED_ARGUMENT_TYPES) ?: UNKNOWN} " + - "got: ${errorContext?.get(Property.ACTUAL_ARGUMENT_TYPES) ?: UNKNOWN}" + "expected: ${errorContext?.get(Property.EXPECTED_ARGUMENT_TYPES) ?: UNKNOWN} " + + "got: ${errorContext?.get(Property.ACTUAL_ARGUMENT_TYPES) ?: UNKNOWN}" }, EVALUATOR_CONCAT_FAILED_DUE_TO_INCOMPATIBLE_TYPE( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.ACTUAL_ARGUMENT_TYPES), "Incorrect type of arguments for operator '||'", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Incorrect type of arguments for operator '||', " + - "expected one of ${ExprValueType.values().filter { it.isText }} " + - "got ${errorContext?.get(Property.ACTUAL_ARGUMENT_TYPES)}" + "expected one of ${ExprValueType.values().filter { it.isText }} " + + "got ${errorContext?.get(Property.ACTUAL_ARGUMENT_TYPES)}" }, 
EVALUATOR_INVALID_PRECISION_FOR_TIME( ErrorCategory.EVALUATOR, LOCATION, - "invalid precision used for TIME type"), + "invalid precision used for TIME type" + ), /** * This is a generic error wrapper for the DateTimeException thrown by Java's [java.time] when attempting to create @@ -596,7 +678,8 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_TIME_FIELD_OUT_OF_RANGE( ErrorCategory.EVALUATOR, LOCATION, - "Invalid value for TIME type"), + "Invalid value for TIME type" + ), /** * This is a generic error thrown whenever Java's [DateTimeFormatter] throws an exception when attempting to @@ -609,7 +692,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Invalid timestamp format pattern: '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}'." }, @@ -618,7 +702,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Timestamp format pattern contains invalid token: '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}'." }, @@ -627,7 +712,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Timestamp format pattern contains invalid symbol: '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}'." }, @@ -636,50 +722,54 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Timestamp format pattern contains unterminated token: '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}'." }, - EVALUATOR_INCOMPLETE_TIMESTAMP_FORMAT_PATTERN( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN, Property.TIMESTAMP_FORMAT_PATTERN_FIELDS), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Timestamp format pattern '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}' " + - "requires additional fields '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN_FIELDS)}'." + "requires additional fields '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN_FIELDS)}'." 
}, EVALUATOR_TIMESTAMP_FORMAT_PATTERN_DUPLICATE_FIELDS( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN, Property.TIMESTAMP_FORMAT_PATTERN_FIELDS), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "The format pattern '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}' contains multiple format " + - "specifiers representing the timestamp field '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN_FIELDS)}'." + "specifiers representing the timestamp field '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN_FIELDS)}'." }, EVALUATOR_TIMESTAMP_FORMAT_PATTERN_HOUR_CLOCK_AM_PM_MISMATCH( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "The format pattern '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}' contains a 12-hour hour of " + - "day format symbol but doesn't also contain an AM/PM field, or it contains a 24-hour hour of day format " + - "specifier and contains an AM/PM field." + "day format symbol but doesn't also contain an AM/PM field, or it contains a 24-hour hour of day format " + + "specifier and contains an AM/PM field." }, EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN_SYMBOL_FOR_PARSING( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { - override fun getErrorMessage(errorContext: PropertyValueMap?): String = - "The format pattern '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}' contains a valid format " + + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { + override fun getErrorMessage(errorContext: PropertyValueMap?): String = + "The format pattern '${errorContext?.get(Property.TIMESTAMP_FORMAT_PATTERN)}' contains a valid format " + "symbol that cannot be applied to timestamp parsing." 
}, @@ -687,25 +777,29 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION, "Failed to parse Ion timestamp", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_CUSTOM_TIMESTAMP_PARSE_FAILURE( ErrorCategory.EVALUATOR, - LOCATION+ setOf(Property.TIMESTAMP_FORMAT_PATTERN), + LOCATION + setOf(Property.TIMESTAMP_FORMAT_PATTERN), "Failed to parse custom timestamp using the specified format pattern", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_PRECISION_LOSS_WHEN_PARSING_TIMESTAMP( ErrorCategory.EVALUATOR, LOCATION, "loss of precision when parsing timestamp", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INTEGER_OVERFLOW( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.INT_SIZE_IN_BYTES), "Int overflow or underflow", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "INT-${errorContext?.get(Property.INT_SIZE_IN_BYTES) ?: UNKNOWN} overflow or underflow" }, @@ -714,7 +808,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.BINDING_NAME, Property.BINDING_NAME_MATCHES), "Binding name was ambiguous", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Binding name was '${errorContext?.get(Property.BINDING_NAME)}'" }, @@ -722,28 +817,31 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_LIKE_INVALID_INPUTS( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.LIKE_VALUE, Property.LIKE_PATTERN, Property.LIKE_ESCAPE), - "Invalid argument given to LIKE expression") { + "Invalid argument given to LIKE expression" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Given: " + - "value = ${errorContext?.get(Property.LIKE_VALUE)?.stringValue() ?: UNKNOWN}, " + - "pattern = ${errorContext?.get(Property.LIKE_PATTERN)?.stringValue() ?: UNKNOWN}, " + - "escape char = ${errorContext?.get(Property.LIKE_ESCAPE)?.stringValue() ?: "none given"}" + "value = ${errorContext?.get(Property.LIKE_VALUE)?.stringValue() ?: UNKNOWN}, " + + "pattern = ${errorContext?.get(Property.LIKE_PATTERN)?.stringValue() ?: UNKNOWN}, " + + "escape char = ${errorContext?.get(Property.LIKE_ESCAPE)?.stringValue() ?: "none given"}" }, EVALUATOR_LIKE_PATTERN_INVALID_ESCAPE_SEQUENCE( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.LIKE_PATTERN, Property.LIKE_ESCAPE), - "Pattern contains an invalid or malformed escape sequence"){ + "Pattern contains an invalid or malformed escape sequence" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Given: " + - "pattern = ${errorContext?.get(Property.LIKE_PATTERN)?.stringValue() ?: UNKNOWN}, " + - "escape char = ${errorContext?.get(Property.LIKE_ESCAPE)?.stringValue() ?: "none given"}" + "pattern = ${errorContext?.get(Property.LIKE_PATTERN)?.stringValue() ?: UNKNOWN}, " + + "escape char = ${errorContext?.get(Property.LIKE_ESCAPE)?.stringValue() ?: "none given"}" }, EVALUATOR_NON_INT_LIMIT_VALUE( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.ACTUAL_TYPE), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "LIMIT value must be an integer but found 
${errorContext.getProperty(Property.ACTUAL_TYPE)}}" }, @@ -752,7 +850,8 @@ enum class ErrorCode(internal val category: ErrorCategory, ErrorCategory.EVALUATOR, LOCATION + setOf(Property.ACTUAL_TYPE), "", - ErrorBehaviorInPermissiveMode.RETURN_MISSING) { + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Struct field key should be text but found ${errorContext.getProperty(Property.ACTUAL_TYPE)}}." }, @@ -760,12 +859,14 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_NEGATIVE_LIMIT( ErrorCategory.EVALUATOR, LOCATION, - "LIMIT must not be negative"), + "LIMIT must not be negative" + ), - EVALUATOR_NON_INT_OFFSET_VALUE ( + EVALUATOR_NON_INT_OFFSET_VALUE( ErrorCategory.EVALUATOR, LOCATION + setOf(Property.ACTUAL_TYPE), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "OFFSET value must be an integer but found ${errorContext.getProperty(Property.ACTUAL_TYPE)}" }, @@ -773,101 +874,119 @@ enum class ErrorCode(internal val category: ErrorCategory, EVALUATOR_NEGATIVE_OFFSET( ErrorCategory.EVALUATOR, LOCATION, - "OFFSET must not be negative"), + "OFFSET must not be negative" + ), EVALUATOR_DIVIDE_BY_ZERO( ErrorCategory.EVALUATOR, LOCATION, "/ by zero", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_MODULO_BY_ZERO( ErrorCategory.EVALUATOR, LOCATION, "% by zero", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INVALID_CONVERSION( ErrorCategory.EVALUATOR, LOCATION, "Invalid conversion", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_UNEXPECTED_VALUE( ErrorCategory.EVALUATOR, LOCATION, - "Unexpected value"), + "Unexpected value" + ), EVALUATOR_UNEXPECTED_VALUE_TYPE( ErrorCategory.EVALUATOR, LOCATION, "Unexpected value type", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INVALID_ARGUMENTS_FOR_TRIM( ErrorCategory.EVALUATOR, setOf(), - "Invalid arguments for trim"), + "Invalid arguments for trim" + ), EVALUATOR_TIMESTAMP_OUT_OF_BOUNDS( ErrorCategory.EVALUATOR, setOf(), "Timestamp out of bounds", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INVALID_ARGUMENTS_FOR_FUNC_CALL( ErrorCategory.EVALUATOR, setOf(), "Invalid arguments for function call", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART( ErrorCategory.EVALUATOR, setOf(), "Invalid arguments for date", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INVALID_ARGUMENTS_FOR_AGG_FUNCTION( ErrorCategory.EVALUATOR, setOf(), - "Invalid arguments for agg function"), + "Invalid arguments for agg function" + ), EVALUATOR_INVALID_COMPARISION( ErrorCategory.EVALUATOR, LOCATION, "Invalid comparision", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_INVALID_BINDING( ErrorCategory.EVALUATOR, LOCATION, - "Invalid binding"), + "Invalid binding" + ), EVALUATOR_ARITHMETIC_EXCEPTION( ErrorCategory.EVALUATOR, LOCATION, "Arithmetic exception", - ErrorBehaviorInPermissiveMode.RETURN_MISSING), + ErrorBehaviorInPermissiveMode.RETURN_MISSING + ), EVALUATOR_SQL_EXCEPTION( ErrorCategory.EVALUATOR, LOCATION, - 
"SQL exception"), + "SQL exception" + ), EVALUATOR_COUNT_START_NOT_ALLOWED( ErrorCategory.EVALUATOR, LOCATION, - "COUNT(*) not allowed"), + "COUNT(*) not allowed" + ), EVALUATOR_GENERIC_EXCEPTION( ErrorCategory.EVALUATOR, LOCATION, - "Generic exception"), + "Generic exception" + ), EVALUATOR_VALUE_NOT_INSTANCE_OF_EXPECTED_TYPE( ErrorCategory.EVALUATOR, LOCATION + Property.EXPECTED_STATIC_TYPE, - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?) = "Value was not an instance of the expected static type: ${errorContext.getProperty(Property.EXPECTED_STATIC_TYPE)}" }, @@ -882,7 +1001,8 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_NON_TEXT_STRUCT_FIELD_KEY( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.ACTUAL_TYPE), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Struct field key should be text but found ${errorContext.getProperty(Property.ACTUAL_TYPE)}}." }, @@ -890,7 +1010,8 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.BINDING_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Global variable access is illegal in this context, variable name: '${errorContext.getProperty(Property.BINDING_NAME)}'" }, @@ -898,7 +1019,8 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_UNBOUND_BINDING( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.BINDING_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "No such variable named '${errorContext.getProperty(Property.BINDING_NAME)}'." }, @@ -906,7 +1028,8 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_UNBOUND_QUOTED_BINDING( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.BINDING_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "No such variable named '${errorContext.getProperty(Property.BINDING_NAME)}'. $UNBOUND_QUOTED_IDENTIFIER_HINT" }, @@ -914,7 +1037,8 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_AMBIGUOUS_BINDING( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.BINDING_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "A variable named '${errorContext.getProperty(Property.BINDING_NAME)}' was already defined in this scope" }, @@ -925,53 +1049,62 @@ enum class ErrorCode(internal val category: ErrorCategory, SEMANTIC_INCORRECT_NODE_ARITY( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.EXPECTED_ARITY_MAX, Property.EXPECTED_ARITY_MIN, Property.ACTUAL_ARITY, Property.FUNCTION_NAME), - "Incorrect number of arguments for node") { + "Incorrect number of arguments for node" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Incorrect number of arguments supplied to `${errorContext.getProperty(Property.FUNCTION_NAME)}`. 
" + - "Min = ${errorContext.getProperty(Property.EXPECTED_ARITY_MIN)}, max = ${errorContext.getProperty(Property.EXPECTED_ARITY_MAX)} " + - "Actual = ${errorContext.getProperty(Property.ACTUAL_ARITY)}" + "Min = ${errorContext.getProperty(Property.EXPECTED_ARITY_MIN)}, max = ${errorContext.getProperty(Property.EXPECTED_ARITY_MAX)} " + + "Actual = ${errorContext.getProperty(Property.ACTUAL_ARITY)}" }, SEMANTIC_INVALID_DECIMAL_ARGUMENTS( ErrorCategory.SEMANTIC, LOCATION, - "Invalid precision or scale for decimal"), + "Invalid precision or scale for decimal" + ), SEMANTIC_HAVING_USED_WITHOUT_GROUP_BY( ErrorCategory.EVALUATOR, LOCATION, - "HAVING cannot be used without GROUP BY or GROUP ALL"), + "HAVING cannot be used without GROUP BY or GROUP ALL" + ), SEMANTIC_ASTERISK_USED_WITH_OTHER_ITEMS( ErrorCategory.EVALUATOR, LOCATION, - "`*` may not be used with other items in a select list"), + "`*` may not be used with other items in a select list" + ), SEMANTIC_MISSING_AS_NAME( ErrorCategory.EVALUATOR, LOCATION, - "Missing AS name"), + "Missing AS name" + ), SEMANTIC_LITERAL_INT_OVERFLOW( ErrorCategory.SEMANTIC, LOCATION, - "Literal int overflow or underflow"), + "Literal int overflow or underflow" + ), SEMANTIC_FLOAT_PRECISION_UNSUPPORTED( ErrorCategory.SEMANTIC, LOCATION, - "FLOAT precision not supported"), + "FLOAT precision not supported" + ), SEMANTIC_UNION_TYPE_INVALID( ErrorCategory.SEMANTIC, LOCATION, - "Union type not permitted"), + "Union type not permitted" + ), // Generic errors UNIMPLEMENTED_FEATURE( ErrorCategory.SEMANTIC, LOCATION + setOf(Property.FEATURE_NAME), - "") { + "" + ) { override fun getErrorMessage(errorContext: PropertyValueMap?): String = "Feature '${errorContext?.get(Property.FEATURE_NAME)?.stringValue() ?: UNKNOWN}' not implemented yet" }; @@ -991,10 +1124,9 @@ enum class ErrorCode(internal val category: ErrorCategory, protected fun getTokenTypeAndTokenValue(errorContext: PropertyValueMap?): String = getTokenType(errorContext) + " : " + getTokenValue(errorContext) + protected open fun detailMessagePrefix(): String = messagePrefix - open protected fun detailMessagePrefix(): String = messagePrefix - - open protected fun detailMessageSuffix(errorContext: PropertyValueMap?): String = + protected open fun detailMessageSuffix(errorContext: PropertyValueMap?): String = getTokenTypeAndTokenValue(errorContext) /** @@ -1010,9 +1142,7 @@ enum class ErrorCode(internal val category: ErrorCategory, fun errorCategory(): String = category.toString() - fun getProperties(): Set = properties - } private fun PropertyValueMap?.getProperty(prop: Property): String = diff --git a/lang/src/org/partiql/lang/errors/ProblemHandler.kt b/lang/src/org/partiql/lang/errors/ProblemHandler.kt index b9397229d1..a59ed799d1 100644 --- a/lang/src/org/partiql/lang/errors/ProblemHandler.kt +++ b/lang/src/org/partiql/lang/errors/ProblemHandler.kt @@ -17,7 +17,7 @@ internal interface ProblemHandler { * inference pass that can result in multiple errors and/or warnings). This handler does not collect other exceptions * that may be thrown. 
*/ -internal class ProblemCollector: ProblemHandler { +internal class ProblemCollector : ProblemHandler { private val problemList = mutableListOf() val problems: List @@ -41,7 +41,7 @@ internal class ProblemCollector: ProblemHandler { * * @throws SemanticException on the first [Problem] logged with severity of [ProblemSeverity.ERROR] */ -internal class ProblemThrower: ProblemHandler { +internal class ProblemThrower : ProblemHandler { override fun handleProblem(problem: Problem) { if (problem.details.severity == ProblemSeverity.ERROR) { throw SemanticException(problem) diff --git a/lang/src/org/partiql/lang/eval/AnyOfCastTable.kt b/lang/src/org/partiql/lang/eval/AnyOfCastTable.kt index 5ba88d9111..81b5c7071b 100644 --- a/lang/src/org/partiql/lang/eval/AnyOfCastTable.kt +++ b/lang/src/org/partiql/lang/eval/AnyOfCastTable.kt @@ -117,12 +117,12 @@ private data class CastError(val error: EvaluationException) : CastResult() { override fun unwrap() = throw error } -private data class CastValue(val value: ExprValue): CastResult() { +private data class CastValue(val value: ExprValue) : CastResult() { override fun unwrap() = value } /** Sentinel case to deal with empty target table--no compatible cast available for the source. */ -private data class CastNil(val sourceType: ExprValueType, val metas: MetaContainer): CastResult() { +private data class CastNil(val sourceType: ExprValueType, val metas: MetaContainer) : CastResult() { override fun unwrap(): Nothing { val errorContext = PropertyValueMap().also { it[Property.CAST_FROM] = sourceType.toString() @@ -152,10 +152,12 @@ private data class CastNil(val sourceType: ExprValueType, val metas: MetaContain * @param metas The metadata of the compilation context. * @param singleTypeCast The function to delegate the implementation of a cast to a single type. 
*/ -internal class AnyOfCastTable(private val anyOfType: AnyOfType, - private val metas: MetaContainer, - private val valueFactory: ExprValueFactory, - singleTypeCast: (SingleType) -> CastFunc) { +internal class AnyOfCastTable( + private val anyOfType: AnyOfType, + private val metas: MetaContainer, + private val valueFactory: ExprValueFactory, + singleTypeCast: (SingleType) -> CastFunc +) { val castFuncTable: Map> val castTypeTable: Map> @@ -264,4 +266,4 @@ internal class AnyOfCastTable(private val anyOfType: AnyOfType, errorContextFrom(metas), internal = true ) -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/BaseExprValue.kt b/lang/src/org/partiql/lang/eval/BaseExprValue.kt index 75e978c2ca..134e0dd2da 100644 --- a/lang/src/org/partiql/lang/eval/BaseExprValue.kt +++ b/lang/src/org/partiql/lang/eval/BaseExprValue.kt @@ -44,4 +44,3 @@ abstract class BaseExprValue : ExprValue { override fun toString(): String = stringify() } - diff --git a/lang/src/org/partiql/lang/eval/Bindings.kt b/lang/src/org/partiql/lang/eval/Bindings.kt index 3e9dabbdbb..4f3edee59b 100644 --- a/lang/src/org/partiql/lang/eval/Bindings.kt +++ b/lang/src/org/partiql/lang/eval/Bindings.kt @@ -28,29 +28,32 @@ enum class BindingCase { companion object { fun fromIonValue(sym: IonValue): BindingCase = - when(sym.stringValue()) { + when (sym.stringValue()) { "case_sensitive" -> SENSITIVE "case_insensitive" -> INSENSITIVE - else -> errNoContext("Unable to convert ion value '${sym.stringValue()}' to a BindingCase instance", - errorCode = ErrorCode.EVALUATOR_INVALID_CONVERSION, - internal = true) + else -> errNoContext( + "Unable to convert ion value '${sym.stringValue()}' to a BindingCase instance", + errorCode = ErrorCode.EVALUATOR_INVALID_CONVERSION, + internal = true + ) } - } + } fun toSymbol(ions: IonSystem) = ions.newSymbol( - when(this) { + when (this) { SENSITIVE -> "case_sensitive" INSENSITIVE -> "case_insensitive" - }) + } + ) } /** * Converts a [CaseSensitivity] to a [BindingCase]. */ -fun CaseSensitivity.toBindingCase(): BindingCase = when(this) { +fun CaseSensitivity.toBindingCase(): BindingCase = when (this) { CaseSensitivity.INSENSITIVE -> BindingCase.INSENSITIVE - CaseSensitivity.SENSITIVE -> BindingCase.SENSITIVE + CaseSensitivity.SENSITIVE -> BindingCase.SENSITIVE } /** @@ -129,8 +132,8 @@ interface Bindings { fun ofMap(backingMap: Map): Bindings = MapBindings(backingMap) /** - * Returns an instance of [Bindings] that is backed by an [IonStruct]. - */ + * Returns an instance of [Bindings] that is backed by an [IonStruct]. + */ @JvmStatic fun ofIonStruct(struct: IonStruct, valueFactory: ExprValueFactory): Bindings = IonStructBindings(valueFactory, struct) } @@ -140,7 +143,7 @@ interface Bindings { private val bindings = HashMap> () fun addBinding(name: String, getter: () -> T): LazyBindingBuilder = - this.apply { bindings[name] = lazy(getter)} + this.apply { bindings[name] = lazy(getter) } fun build(): Bindings = LazyBindings(bindings) @@ -167,20 +170,19 @@ class MapBindings(val originalCaseMap: Map) : Bindings { override fun get(bindingName: BindingName): T? 
= when (bindingName.bindingCase) { - BindingCase.SENSITIVE -> originalCaseMap[bindingName.name] + BindingCase.SENSITIVE -> originalCaseMap[bindingName.name] BindingCase.INSENSITIVE -> { val foundBindings = loweredCaseMap[bindingName.loweredName] when { - foundBindings == null -> null + foundBindings == null -> null foundBindings.size == 1 -> foundBindings.first().value - else -> + else -> errAmbiguousBinding(bindingName.name, foundBindings.map { it.key }) } } } } - /** A [Bindings] implementation that lazily materializes the values of the bindings contained within. */ private class LazyBindings(originalCaseMap: Map>) : Bindings { private val delegate: Bindings> = MapBindings(originalCaseMap) diff --git a/lang/src/org/partiql/lang/eval/BindingsExtensions.kt b/lang/src/org/partiql/lang/eval/BindingsExtensions.kt index db52b7637a..bc8619b784 100644 --- a/lang/src/org/partiql/lang/eval/BindingsExtensions.kt +++ b/lang/src/org/partiql/lang/eval/BindingsExtensions.kt @@ -37,13 +37,13 @@ fun Bindings.delegate(fallback: Bindings): Bindings = * @receiver The [Bindings] to delegate over. * @param names, the blacklisted names */ -fun Bindings.blacklist(vararg names: String) = object: Bindings { +fun Bindings.blacklist(vararg names: String) = object : Bindings { val blacklisted = names.toSet() val loweredBlacklisted = names.map { it.toLowerCase() }.toSet() override fun get(bindingName: BindingName): T? { val isBlacklisted = when (bindingName.bindingCase) { - BindingCase.SENSITIVE -> blacklisted.contains(bindingName.name) + BindingCase.SENSITIVE -> blacklisted.contains(bindingName.name) BindingCase.INSENSITIVE -> loweredBlacklisted.contains(bindingName.loweredName) } return when { diff --git a/lang/src/org/partiql/lang/eval/CompileOptions.kt b/lang/src/org/partiql/lang/eval/CompileOptions.kt index e7ddb132cc..a26511e013 100644 --- a/lang/src/org/partiql/lang/eval/CompileOptions.kt +++ b/lang/src/org/partiql/lang/eval/CompileOptions.kt @@ -45,7 +45,6 @@ enum class ProjectionIterationBehavior { FILTER_MISSING, UNFILTERED } - /** * Indicates how the evaluator is to handle type checking errors and how `MISSING` values are propagated * when encountered while evaluating binary operators and function calls. 
@@ -73,7 +72,7 @@ enum class TypingMode { */ PERMISSIVE - //TODO: STRICT + // TODO: STRICT } /** @@ -192,13 +191,13 @@ data class CompileOptions private constructor ( fun undefinedVariable(value: UndefinedVariableBehavior) = set { copy(undefinedVariable = value) } fun projectionIteration(value: ProjectionIterationBehavior) = set { copy(projectionIteration = value) } fun visitorTransformMode(value: VisitorTransformMode) = set { copy(visitorTransformMode = value) } - fun typingMode(value: TypingMode) = set { copy(typingMode = value)} - fun typedOpBehavior(value: TypedOpBehavior) = set { copy(typedOpBehavior = value)} - fun thunkOptions(value: ThunkOptions) = set { copy(thunkOptions = value)} - fun evaluationTimeTypeChecks(value: ThunkReturnTypeAssertions) = set { copy(thunkReturnTypeAssertions = value )} + fun typingMode(value: TypingMode) = set { copy(typingMode = value) } + fun typedOpBehavior(value: TypedOpBehavior) = set { copy(typedOpBehavior = value) } + fun thunkOptions(value: ThunkOptions) = set { copy(thunkOptions = value) } + fun evaluationTimeTypeChecks(value: ThunkReturnTypeAssertions) = set { copy(thunkReturnTypeAssertions = value) } fun defaultTimezoneOffset(value: ZoneOffset) = set { copy(defaultTimezoneOffset = value) } - private inline fun set(block: CompileOptions.() -> CompileOptions) : Builder { + private inline fun set(block: CompileOptions.() -> CompileOptions): Builder { options = block(options) return this } diff --git a/lang/src/org/partiql/lang/eval/Environment.kt b/lang/src/org/partiql/lang/eval/Environment.kt index 7a8e7d9a45..a86a3553a0 100644 --- a/lang/src/org/partiql/lang/eval/Environment.kt +++ b/lang/src/org/partiql/lang/eval/Environment.kt @@ -30,7 +30,8 @@ data class Environment( val current: Bindings = locals, val session: EvaluationSession, val groups: MutableMap = createGroupMap(), - val currentGroup: Group? = null) { + val currentGroup: Group? = null +) { companion object { fun standard() = Environment(locals = Bindings.empty(), session = EvaluationSession.standard()) @@ -47,7 +48,8 @@ data class Environment( internal fun nest( newLocals: Bindings, currentMode: CurrentMode = CurrentMode.LOCALS, - newGroup: Group? = currentGroup): Environment { + newGroup: Group? = currentGroup + ): Environment { val derivedLocals = newLocals.delegate(locals) val newCurrent = when (currentMode) { @@ -64,12 +66,12 @@ data class Environment( */ internal fun nestQuery() = copy( currentGroup = null, - groups = createGroupMap()) + groups = createGroupMap() + ) /** Constructs a copy of this environment with the locals being the current bindings. */ internal fun flipToLocals(): Environment = copy(current = locals) /** Constructs a copy of this environment with the [globals] being the current bindings. */ internal fun flipToGlobalsFirst(): Environment = copy(current = session.globals.delegate(locals)) - } diff --git a/lang/src/org/partiql/lang/eval/ErrorSignaler.kt b/lang/src/org/partiql/lang/eval/ErrorSignaler.kt index 652732007b..4585c27229 100644 --- a/lang/src/org/partiql/lang/eval/ErrorSignaler.kt +++ b/lang/src/org/partiql/lang/eval/ErrorSignaler.kt @@ -48,31 +48,30 @@ internal class ErrorDetails( val metas: MetaContainer, /** The programmer readable exception message. */ val message: String, - val errorContext: PropertyValueMap? = null) + val errorContext: PropertyValueMap? 
= null +) internal fun TypingMode.createErrorSignaler(valueFactory: ExprValueFactory) = - when(this) { + when (this) { TypingMode.LEGACY -> LegacyErrorSignaler() TypingMode.PERMISSIVE -> PermissiveErrorSignaler(valueFactory.missingValue) } - /** Defines legacy error signaling. */ -private class LegacyErrorSignaler: ErrorSignaler { +private class LegacyErrorSignaler : ErrorSignaler { /** Invokes [createErrorDetails] and uses the return value to construct and throw an [EvaluationException]. */ override fun error(errorCode: ErrorCode, createErrorDetails: () -> ErrorDetails): ExprValue = throwEE(errorCode, createErrorDetails) } /** Defines permissive error signaling. */ -private class PermissiveErrorSignaler(private val theMissingValue: ExprValue): ErrorSignaler { +private class PermissiveErrorSignaler(private val theMissingValue: ExprValue) : ErrorSignaler { /** Ignores [createErrorDetails] and simply returns [theMissingValue]. */ override fun error(errorCode: ErrorCode, createErrorDetails: () -> ErrorDetails): ExprValue = - when(errorCode.errorBehaviorInPermissiveMode) { + when (errorCode.errorBehaviorInPermissiveMode) { ErrorBehaviorInPermissiveMode.THROW_EXCEPTION -> throwEE(errorCode, createErrorDetails) ErrorBehaviorInPermissiveMode.RETURN_MISSING -> theMissingValue - } } @@ -82,7 +81,7 @@ private fun throwEE(errorCode: ErrorCode, createErrorDetails: () -> ErrorDetails // Add source location if we need to and if we can val srcLoc = metas[SourceLocationMeta.TAG] as? SourceLocationMeta val errCtx = this.errorContext ?: propertyValueMapOf() - if(srcLoc != null) { + if (srcLoc != null) { if (!errCtx.hasProperty(Property.LINE_NUMBER)) { errCtx[Property.LINE_NUMBER] = srcLoc.lineNum } @@ -96,6 +95,7 @@ private fun throwEE(errorCode: ErrorCode, createErrorDetails: () -> ErrorDetails errorCode = errorCode, errorContext = errCtx, cause = null, - internal = false) + internal = false + ) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt b/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt index ad553a4c7d..4419c32b5c 100644 --- a/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt +++ b/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt @@ -23,20 +23,20 @@ import com.amazon.ion.Timestamp import com.amazon.ionelement.api.MetaContainer import com.amazon.ionelement.api.ionBool import com.amazon.ionelement.api.toIonValue -import org.partiql.lang.ast.ExprNode -import org.partiql.lang.ast.toAstStatement -import org.partiql.lang.ast.UniqueNameMeta -import org.partiql.lang.ast.SourceLocationMeta import org.partiql.lang.ast.AggregateCallSiteListMeta import org.partiql.lang.ast.AggregateRegisterIdMeta -import org.partiql.lang.ast.IsCountStarMeta -import org.partiql.lang.domains.staticType import org.partiql.lang.ast.AstDeserializerBuilder import org.partiql.lang.ast.AstVersion -import org.partiql.lang.ast.sourceLocation +import org.partiql.lang.ast.ExprNode import org.partiql.lang.ast.IonElementMetaContainer +import org.partiql.lang.ast.IsCountStarMeta +import org.partiql.lang.ast.SourceLocationMeta +import org.partiql.lang.ast.UniqueNameMeta +import org.partiql.lang.ast.sourceLocation +import org.partiql.lang.ast.toAstStatement import org.partiql.lang.ast.toPartiQlMetaContainer import org.partiql.lang.domains.PartiqlAst +import org.partiql.lang.domains.staticType import org.partiql.lang.domains.toBindingCase import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property @@ -51,16 +51,16 @@ import 
org.partiql.lang.eval.like.parsePattern import org.partiql.lang.eval.time.Time import org.partiql.lang.eval.visitors.PartiqlAstSanityValidator import org.partiql.lang.syntax.SqlParser -import org.partiql.lang.types.TypedOpParameter +import org.partiql.lang.types.AnyOfType +import org.partiql.lang.types.AnyType +import org.partiql.lang.types.FunctionSignature import org.partiql.lang.types.IntType +import org.partiql.lang.types.SingleType import org.partiql.lang.types.StaticType -import org.partiql.lang.types.FunctionSignature +import org.partiql.lang.types.TypedOpParameter import org.partiql.lang.types.UnknownArguments -import org.partiql.lang.types.SingleType import org.partiql.lang.types.UnsupportedTypeCheckException import org.partiql.lang.types.toTypedOpParameter -import org.partiql.lang.types.AnyType -import org.partiql.lang.types.AnyOfType import org.partiql.lang.util.bigDecimalOf import org.partiql.lang.util.checkThreadInterrupted import org.partiql.lang.util.codePointSequence @@ -122,11 +122,12 @@ internal class EvaluatingCompiler( "compilationContextStack was empty.", ErrorCode.EVALUATOR_UNEXPECTED_VALUE, internal = true ) - //Note: please don't make this inline -- it messes up [EvaluationException] stack traces and - //isn't a huge benefit because this is only used at SQL-compile time anyway. + // Note: please don't make this inline -- it messes up [EvaluationException] stack traces and + // isn't a huge benefit because this is only used at SQL-compile time anyway. private fun nestCompilationContext( expressionContext: ExpressionContext, - fromSourceNames: Set, block: () -> R + fromSourceNames: Set, + block: () -> R ): R { compilationContextStack.push( when { @@ -415,7 +416,8 @@ internal class EvaluatingCompiler( ErrorCode.EVALUATOR_FEATURE_NOT_SUPPORTED_YET, errorContextFrom(metas).also { it[Property.FEATURE_NAME] = expr.javaClass.canonicalName - }, internal = false + }, + internal = false ) } } @@ -478,7 +480,7 @@ internal class EvaluatingCompiler( val longValue: Long = value.scalar.numberValue()?.toLong() ?: error( "ExprValue.numberValue() must not be `NULL` when its type is INT." + - "This indicates that the ExprValue instance has a bug." + "This indicates that the ExprValue instance has a bug." ) // PRO-TIP: make sure to use the `Long` primitive type here with `.contains` otherwise @@ -488,7 +490,7 @@ internal class EvaluatingCompiler( } else -> error( "The expression's static type was supposed to be INT but instead it was ${value.type}" + - "This may indicate the presence of a bug in the type inferencer." + "This may indicate the presence of a bug in the type inferencer." ) } } @@ -530,7 +532,8 @@ internal class EvaluatingCompiler( !validator(naryResult), ErrorCode.EVALUATOR_INTEGER_OVERFLOW, { ErrorDetails(metas, "Integer overflow", errorContextFrom(metas)) }, - { naryResult }) + { naryResult } + ) } } // If there is no IntType StaticType, can't validate the integer size either. @@ -574,9 +577,9 @@ internal class EvaluatingCompiler( val exprThunk = compileAstExpr(expr.expr) val computeThunk = thunkFactory.thunkEnvOperands(metas, exprThunk) { _, value -> - //Invoking .numberValue() here makes this essentially just a type check + // Invoking .numberValue() here makes this essentially just a type check value.numberValue() - //Original value is returned unmodified. + // Original value is returned unmodified. 
value } @@ -719,7 +722,7 @@ internal class EvaluatingCompiler( fun isOptimizedCase(values: List): Boolean = values.all { it is PartiqlAst.Expr.Lit && !it.value.isNull } - fun optimizedCase(values: List): ThunkEnv{ + fun optimizedCase(values: List): ThunkEnv { // Put all the literals in the sequence into a pre-computed map to be checked later by the thunk. // If the left-hand value is one of these we can short-circuit with a result of TRUE. // This is the fastest possible case and allows for hundreds of literal values (or more) in the @@ -822,7 +825,7 @@ internal class EvaluatingCompiler( val currValue = currThunk(env) when { currValue.isUnknown() -> hasUnknowns = true - //Short circuit only if we encounter a known false value. + // Short circuit only if we encounter a known false value. !currValue.booleanValue() -> return@thunk valueFactory.newBoolean(false) } } @@ -838,7 +841,7 @@ internal class EvaluatingCompiler( argThunks.forEach { currThunk -> val currValue = currThunk(env) when (currValue.type) { - //Short circuit only if we encounter a known false value. + // Short circuit only if we encounter a known false value. ExprValueType.BOOL -> if (!currValue.booleanValue()) return@thunk valueFactory.newBoolean(false) ExprValueType.NULL -> hasNull = true // type mismatch, return missing @@ -889,7 +892,7 @@ internal class EvaluatingCompiler( argThunks.forEach { currThunk -> val currValue = currThunk(env) when (currValue.type) { - //Short circuit only if we encounter a known true value. + // Short circuit only if we encounter a known true value. ExprValueType.BOOL -> if (currValue.booleanValue()) return@thunk valueFactory.newBoolean(true) ExprValueType.NULL -> hasNull = true else -> hasMissing = true // type mismatch, return missing. @@ -955,7 +958,7 @@ internal class EvaluatingCompiler( "${func.signature.name} takes exactly ${func.signature.arity.first} arguments, received: ${funcArgThunks.size}" else -> "${func.signature.name} takes between ${func.signature.arity.first} and " + - "${func.signature.arity.last} arguments, received: ${funcArgThunks.size}" + "${func.signature.arity.last} arguments, received: ${funcArgThunks.size}" } throw EvaluationException( @@ -1185,7 +1188,8 @@ internal class EvaluatingCompiler( "FLOAT precision parameter is unsupported", ErrorCode.SEMANTIC_FLOAT_PRECISION_UNSUPPORTED, errorContextFrom(expr.type.metas), - internal = false) + internal = false + ) } val typeMatchFunc = when (val staticType = typedOpParameter.staticType) { @@ -1223,7 +1227,8 @@ internal class EvaluatingCompiler( "FLOAT precision parameter is unsupported", ErrorCode.SEMANTIC_FLOAT_PRECISION_UNSUPPORTED, errorContextFrom(asType.metas), - internal = false) + internal = false + ) } fun typeOpValidate( @@ -1241,7 +1246,7 @@ internal class EvaluatingCompiler( locationMeta?.let { fillErrorContext(errorContext, it) } throw EvaluationException( - "Validation failure for ${asType}", + "Validation failure for $asType", ErrorCode.EVALUATOR_CAST_FAILED, errorContext, internal = false @@ -1329,7 +1334,6 @@ internal class EvaluatingCompiler( } } } - } catch (e: EvaluationException) { if (e.internal) { throw e @@ -1715,11 +1719,13 @@ internal class EvaluatingCompiler( is PartiqlAst.SetQuantifier.All -> projectedRows }.let { rowsWithOffsetAndLimit(it, env) } - valueFactory.newBag(quantifiedRows.map { - // TODO make this expose the ordinal for ordered sequences - // make sure we don't expose the underlying value's name out of a SELECT - it.unnamedValue() - }) + valueFactory.newBag( + quantifiedRows.map { + // 
TODO make this expose the ordinal for ordered sequences + // make sure we don't expose the underlying value's name out of a SELECT + it.unnamedValue() + } + ) } else -> { // Grouping is needed @@ -1747,7 +1753,7 @@ internal class EvaluatingCompiler( } else -> { -> RegisterBank(aggregateListMeta.aggregateCallSites.size).apply { - //set up aggregate registers + // set up aggregate registers compiledAggregates?.forEachIndexed { index, ca -> set(index, ca.factory.create()) } @@ -1782,7 +1788,6 @@ internal class EvaluatingCompiler( listOf(syntheticGroup.key) ) - valueFactory.newBag(listOf(groupResult).asSequence()) } } @@ -1817,7 +1822,7 @@ internal class EvaluatingCompiler( // For each "row" in the output of the FROM clause fromProductions.forEach { fromProduction -> - //Determine the group key for this value + // Determine the group key for this value val groupKey = groupKeyThunk(fromProduction.env) // look up existing group using group key (this is slow) @@ -1834,11 +1839,13 @@ internal class EvaluatingCompiler( groupAsName.run { val seq = fromSourceBindingNames.asSequence().map { pair -> - (fromProduction.env.current[pair.bindingName] ?: errNoContext( - "Could not resolve from source binding name during group as variable mapping", - errorCode = ErrorCode.INTERNAL_ERROR, - internal = true - )).namedValue(pair.nameExprValue) + ( + fromProduction.env.current[pair.bindingName] ?: errNoContext( + "Could not resolve from source binding name during group as variable mapping", + errorCode = ErrorCode.INTERNAL_ERROR, + internal = true + ) + ).namedValue(pair.nameExprValue) }.asSequence() group.groupValues.add(createStructExprValue(seq, StructOrdering.UNORDERED)) @@ -1867,11 +1874,13 @@ internal class EvaluatingCompiler( is PartiqlAst.Projection.ProjectValue -> { nestCompilationContext(ExpressionContext.NORMAL, allFromSourceAliases) { val valueThunk = compileAstExpr(project.value) - getQueryThunk(thunkFactory.thunkEnvValueList(project.metas) { env, _ -> - valueThunk( - env - ) - }) + getQueryThunk( + thunkFactory.thunkEnvValueList(project.metas) { env, _ -> + valueThunk( + env + ) + } + ) } } is PartiqlAst.Projection.ProjectPivot -> { @@ -2005,7 +2014,6 @@ internal class EvaluatingCompiler( groupByEnv.nest(currentGroup.key.bindings, newGroup = currentGroup) .nest(groupAsBindings) - } else -> { groupByEnv, currentGroup -> groupByEnv.nest(currentGroup.key.bindings, newGroup = currentGroup) @@ -2033,7 +2041,7 @@ internal class EvaluatingCompiler( } } else -> { groupByEnv, currentGroup -> - //Create a closure that simply performs the final projection and + // Create a closure that simply performs the final projection and // returns the result. 
selectProjectionThunk(groupByEnv, listOf(currentGroup.key)) } @@ -2366,8 +2374,8 @@ internal class EvaluatingCompiler( val indexExpr = pathComponent.index val caseSensitivity = pathComponent.case when { - //If indexExpr is a literal string, there is no need to evaluate it--just compile a - //thunk that directly returns a bound value + // If indexExpr is a literal string, there is no need to evaluate it--just compile a + // thunk that directly returns a bound value indexExpr is PartiqlAst.Expr.Lit && indexExpr.value.toIonValue(valueFactory.ion) is IonString -> { val lookupName = BindingName( indexExpr.value.toIonValue(valueFactory.ion).stringValue()!!, @@ -2400,7 +2408,6 @@ internal class EvaluatingCompiler( ) TypingMode.PERMISSIVE -> valueFactory.missingValue } - } } ?: valueFactory.missingValue } @@ -2462,7 +2469,8 @@ internal class EvaluatingCompiler( } } } - }) + } + ) } return when (componentThunks.size) { 1 -> componentThunks.first() @@ -2550,7 +2558,8 @@ internal class EvaluatingCompiler( val patternParts = getPatternParts( valueFactory.newFromIonValue(patternExpr.value.toIonValue(valueFactory.ion)), (escapeExpr as? PartiqlAst.Expr.Lit)?.value?.toIonValue(valueFactory.ion) - ?.let { valueFactory.newFromIonValue(it) }) + ?.let { valueFactory.newFromIonValue(it) } + ) // If valueExpr is also a literal then we can evaluate this at compile time and return a constant. if (valueExpr is PartiqlAst.Expr.Lit) { @@ -2569,14 +2578,14 @@ internal class EvaluatingCompiler( val patternThunk = compileAstExpr(patternExpr) when (escapeExpr) { null -> { - //thunk that re-compiles the DFA every evaluation without a custom escape sequence + // thunk that re-compiles the DFA every evaluation without a custom escape sequence thunkFactory.thunkEnvOperands(metas, valueThunk, patternThunk) { _, value, pattern -> val pps = getPatternParts(pattern, null) runPatternParts(value, pps) } } else -> { - //thunk that re-compiles the pattern every evaluation but *with* a custom escape sequence + // thunk that re-compiles the pattern every evaluation but *with* a custom escape sequence val escapeThunk = compileAstExpr(escapeExpr) thunkFactory.thunkEnvOperands( metas, @@ -2634,7 +2643,7 @@ internal class EvaluatingCompiler( escape?.let { val escapeCharString = checkEscapeChar(escape, escapeLocationMeta) - val escapeCharCodePoint = escapeCharString.codePointAt(0) // escape is a string of length 1 + val escapeCharCodePoint = escapeCharString.codePointAt(0) // escape is a string of length 1 val validEscapedChars = setOf('_'.toInt(), '%'.toInt(), escapeCharCodePoint) val iter = patternString.codePointSequence().iterator() @@ -2707,7 +2716,8 @@ internal class EvaluatingCompiler( ErrorCode.EVALUATOR_FEATURE_NOT_SUPPORTED_YET, errorContextFrom(node.metas).also { it[Property.FEATURE_NAME] = "DDL Operations" - }, internal = false + }, + internal = false ) } @@ -2718,7 +2728,8 @@ internal class EvaluatingCompiler( ErrorCode.EVALUATOR_FEATURE_NOT_SUPPORTED_YET, errorContextFrom(node.metas).also { it[Property.FEATURE_NAME] = "DML Operations" - }, internal = false + }, + internal = false ) } @@ -2749,7 +2760,7 @@ internal class EvaluatingCompiler( "${procedure.signature.name} takes exactly ${procedure.signature.arity.first} arguments, received: ${args.size}" else -> "${procedure.signature.name} takes between ${procedure.signature.arity.first} and " + - "${procedure.signature.arity.last} arguments, received: ${args.size}" + "${procedure.signature.arity.last} arguments, received: ${args.size}" } throw EvaluationException( @@ -2881,7 
+2892,6 @@ private enum class ExpressionContext { AGG_ARG } - /** * Tracks state used by the compiler while compiling. * @@ -2917,10 +2927,9 @@ private class SingleProjectionElement(val name: ExprValue, val thunk: ThunkEnv) */ private class MultipleProjectionElement(val thunks: List) : ProjectionElement() - private val MetaContainer.sourceLocationMeta get() = this[SourceLocationMeta.TAG] as? SourceLocationMeta private fun StaticType.getTypes() = when (val flattened = this.flatten()) { is AnyOfType -> flattened.types else -> listOf(this) -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/EvaluationSession.kt b/lang/src/org/partiql/lang/eval/EvaluationSession.kt index 59dad25066..72088f8523 100644 --- a/lang/src/org/partiql/lang/eval/EvaluationSession.kt +++ b/lang/src/org/partiql/lang/eval/EvaluationSession.kt @@ -24,9 +24,11 @@ import com.amazon.ion.Timestamp * @property parameters List of parameters to be substituted for positional placeholders * @property now Timestamp to consider as the current time, used by functions like `utcnow()` and `now()`. Defaults to [Timestamp.nowZ] */ -class EvaluationSession private constructor(val globals: Bindings, - val parameters: List, - val now: Timestamp) { +class EvaluationSession private constructor( + val globals: Bindings, + val parameters: List, + val now: Timestamp +) { companion object { /** * Java style builder to construct a new [EvaluationSession]. Uses the default value for any non specified field @@ -68,8 +70,10 @@ class EvaluationSession private constructor(val globals: Bindings, return this } - fun build(): EvaluationSession = EvaluationSession(now = now ?: Timestamp.nowZ(), - parameters = parameters, - globals = globals) + fun build(): EvaluationSession = EvaluationSession( + now = now ?: Timestamp.nowZ(), + parameters = parameters, + globals = globals + ) } } diff --git a/lang/src/org/partiql/lang/eval/Exceptions.kt b/lang/src/org/partiql/lang/eval/Exceptions.kt index d1da283a09..5e3f608edd 100644 --- a/lang/src/org/partiql/lang/eval/Exceptions.kt +++ b/lang/src/org/partiql/lang/eval/Exceptions.kt @@ -26,21 +26,26 @@ import org.partiql.lang.util.propertyValueMapOf import org.partiql.lang.util.to /** Error for evaluation problems. */ -open class EvaluationException(message: String, - errorCode: ErrorCode, - errorContext: PropertyValueMap? = null, - cause: Throwable? = null, - val internal: Boolean) : SqlException(message, errorCode, errorContext, cause) { - - - constructor(cause: Throwable, - errorCode: ErrorCode, - errorContext: PropertyValueMap? = null, - internal: Boolean) : this(message = cause.message ?: "", - errorCode = errorCode, - errorContext = errorContext, - internal = internal, - cause = cause) +open class EvaluationException( + message: String, + errorCode: ErrorCode, + errorContext: PropertyValueMap? = null, + cause: Throwable? = null, + val internal: Boolean +) : SqlException(message, errorCode, errorContext, cause) { + + constructor( + cause: Throwable, + errorCode: ErrorCode, + errorContext: PropertyValueMap? 
= null, + internal: Boolean + ) : this( + message = cause.message ?: "", + errorCode = errorCode, + errorContext = errorContext, + internal = internal, + cause = cause + ) } /** @@ -52,7 +57,7 @@ internal fun errNoContext(message: String, errorCode: ErrorCode, internal: Boole internal fun err(message: String, errorCode: ErrorCode, errorContext: PropertyValueMap?, internal: Boolean): Nothing = throw EvaluationException(message, errorCode, errorContext, internal = internal) -internal fun expectedArgTypeErrorMsg (types: List) : String = when (types.size) { +internal fun expectedArgTypeErrorMsg(types: List): String = when (types.size) { 0 -> throw IllegalStateException("Should have at least one expected argument type. ") 1 -> types[0].toString() else -> { @@ -82,7 +87,7 @@ internal fun errInvalidArgumentType( ) err( - message = "Invalid type for argument ${position} of ${signature.name}.", + message = "Invalid type for argument $position of ${signature.name}.", errorCode = ErrorCode.EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_FUNC_CALL, errorContext = errorContext, internal = false @@ -90,24 +95,26 @@ internal fun errInvalidArgumentType( } internal fun errIntOverflow(intSizeInBytes: Int, errorContext: PropertyValueMap? = null): Nothing { - throw EvaluationException(message = "Int overflow or underflow", - errorCode = ErrorCode.EVALUATOR_INTEGER_OVERFLOW, - errorContext = (errorContext ?: PropertyValueMap()).also { - it[Property.INT_SIZE_IN_BYTES] = intSizeInBytes - }, - internal = false) + throw EvaluationException( + message = "Int overflow or underflow", + errorCode = ErrorCode.EVALUATOR_INTEGER_OVERFLOW, + errorContext = (errorContext ?: PropertyValueMap()).also { + it[Property.INT_SIZE_IN_BYTES] = intSizeInBytes + }, + internal = false + ) } fun errorContextFrom(location: SourceLocationMeta?): PropertyValueMap { val errorContext = PropertyValueMap() - if(location != null) { + if (location != null) { fillErrorContext(errorContext, location) } return errorContext } fun fillErrorContext(errorContext: PropertyValueMap, location: SourceLocationMeta?) { - if(location != null) { + if (location != null) { errorContext[Property.LINE_NUMBER] = location.lineNum errorContext[Property.COLUMN_NUMBER] = location.charOffset } @@ -115,19 +122,19 @@ fun fillErrorContext(errorContext: PropertyValueMap, location: SourceLocationMet fun fillErrorContext(errorContext: PropertyValueMap, metaContainer: MetaContainer) { val location = metaContainer[SourceLocationMeta.TAG] as? SourceLocationMeta - if(location != null) { + if (location != null) { fillErrorContext(errorContext, location) } } fun errorContextFrom(metaContainer: MetaContainer?): PropertyValueMap { - if(metaContainer == null) { + if (metaContainer == null) { return PropertyValueMap() } val location = metaContainer[SourceLocationMeta.TAG] as? SourceLocationMeta - return if(location != null) { + return if (location != null) { errorContextFrom(location) } else { PropertyValueMap() } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/ExprFunction.kt b/lang/src/org/partiql/lang/eval/ExprFunction.kt index 550e1ff1bc..28d3b76c12 100644 --- a/lang/src/org/partiql/lang/eval/ExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/ExprFunction.kt @@ -69,7 +69,6 @@ interface ExprFunction { errNoContext("Invalid implementation for ${signature.name}#call", ErrorCode.INTERNAL_ERROR, true) } - /** * Invokes the function with its required parameters and any variadic parameters. 
* @@ -88,7 +87,6 @@ interface ExprFunction { } } - /** * Invokes the function. * @@ -105,4 +103,4 @@ fun ExprFunction.call(env: Environment, args: Arguments): ExprValue = is RequiredArgs -> callWithRequired(env, args.required) is RequiredWithOptional -> callWithOptional(env, args.required, args.opt) is RequiredWithVariadic -> callWithVariadic(env, args.required, args.variadic) - } \ No newline at end of file + } diff --git a/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt b/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt index 38ac0c97ac..d6ffb4d5be 100644 --- a/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt +++ b/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt @@ -63,10 +63,9 @@ private fun Path.extractColumnAlias(idx: Int): String { val maybeLiteral = nameOrigin.expr when { maybeLiteral is Literal && maybeLiteral.ionValue is IonString -> maybeLiteral.ionValue.stringValue() - else -> syntheticColumnName(idx) + else -> syntheticColumnName(idx) } } else -> syntheticColumnName(idx) } } - diff --git a/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt b/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt index bea22e497d..0dc75aa129 100644 --- a/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt +++ b/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt @@ -109,7 +109,6 @@ val ExprValue.name: ExprValue? val ExprValue.address: ExprValue? get() = asFacet(Addressed::class.java)?.address - fun ExprValue.booleanValue(): Boolean = scalar.booleanValue() ?: errNoContext("Expected boolean: $ionValue", errorCode = ErrorCode.EVALUATOR_UNEXPECTED_VALUE_TYPE, internal = false) @@ -134,12 +133,13 @@ fun ExprValue.bytesValue(): ByteArray = internal fun ExprValue.dateTimePartValue(): DateTimePart = try { DateTimePart.valueOf(this.stringValue().toUpperCase()) - } - catch (e : IllegalArgumentException) { - throw EvaluationException(cause = e, - message = "invalid datetime part, valid values: [${DATE_TIME_PART_KEYWORDS.joinToString()}]", - errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, - internal = false) + } catch (e: IllegalArgumentException) { + throw EvaluationException( + cause = e, + message = "invalid datetime part, valid values: [${DATE_TIME_PART_KEYWORDS.joinToString()}]", + errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, + internal = false + ) } internal fun ExprValue.intValue(): Int = this.numberValue().toInt() @@ -177,10 +177,10 @@ fun ExprValue.exprEquals(other: ExprValue): Boolean = DEFAULT_COMPARATOR.compare */ operator fun ExprValue.compareTo(other: ExprValue): Int { return when { - type.isUnknown || other.type.isUnknown -> + type.isUnknown || other.type.isUnknown -> throw EvaluationException("Null value cannot be compared: $this, $other", errorCode = ErrorCode.EVALUATOR_INVALID_COMPARISION, internal = false) - isDirectlyComparableTo(other) -> DEFAULT_COMPARATOR.compare(this, other) - else -> errNoContext("Cannot compare values: $this, $other", errorCode = ErrorCode.EVALUATOR_INVALID_COMPARISION, internal = false) + isDirectlyComparableTo(other) -> DEFAULT_COMPARATOR.compare(this, other) + else -> errNoContext("Cannot compare values: $this, $other", errorCode = ErrorCode.EVALUATOR_INVALID_COMPARISION, internal = false) } } @@ -284,16 +284,17 @@ fun ExprValue.cast( val errorCode = if (locationMeta == null) { ErrorCode.EVALUATOR_CAST_FAILED_NO_LOCATION - } - else { + } else { ErrorCode.EVALUATOR_CAST_FAILED } - throw EvaluationException(message = message, - errorCode = errorCode, - errorContext = errorContext, - internal = internal, 
- cause = cause) + throw EvaluationException( + message = message, + errorCode = errorCode, + errorContext = errorContext, + internal = internal, + cause = cause + ) } val longMaxDecimal = bigDecimalOf(Long.MAX_VALUE) @@ -335,13 +336,13 @@ fun ExprValue.cast( else -> this } TypedOpBehavior.HONOR_PARAMETERS -> when (this) { - is BigDecimal -> this.setScale(0, RoundingMode.HALF_EVEN) - // [kotlin.math.round] rounds towards the closes even number on tie - // https://kotlinlang.org/api/latest/jvm/stdlib/kotlin.math/round.html - is Float -> round(this) - is Double -> round(this) - else -> this - } + is BigDecimal -> this.setScale(0, RoundingMode.HALF_EVEN) + // [kotlin.math.round] rounds towards the closes even number on tie + // https://kotlinlang.org/api/latest/jvm/stdlib/kotlin.math/round.html + is Float -> round(this) + is Double -> round(this) + else -> this + } }.let { // after rounding, check that the value can fit into range of the type being casted into if (it < rangeForType.first || it > rangeForType.last) { @@ -386,7 +387,7 @@ fun ExprValue.cast( is StringType.StringLengthConstraint.Constrained -> { val actualCodepointCount = this.codePointCount(0, this.length) val lengthConstraint = type.lengthConstraint.length.value - val truncatedString = if(actualCodepointCount <= lengthConstraint) { + val truncatedString = if (actualCodepointCount <= lengthConstraint) { this // no truncation needed } else { this.substring(0, this.offsetByCodePoints(0, lengthConstraint)) @@ -414,7 +415,7 @@ fun ExprValue.cast( // We further need to check for the time zone and hence we do not short circuit here when the type is TIME. type == targetType.runtimeType && type != ExprValueType.TIME -> { return when (targetType) { - is IntType, is FloatType, is DecimalType-> numberValue().exprValue(targetType) + is IntType, is FloatType, is DecimalType -> numberValue().exprValue(targetType) is StringType -> stringValue().exprValue(targetType) else -> this } @@ -433,13 +434,13 @@ fun ExprValue.cast( } } is IntType -> when { - type == ExprValueType.BOOL -> return if(booleanValue()) 1L.exprValue(targetType) else 0L.exprValue(targetType) + type == ExprValueType.BOOL -> return if (booleanValue()) 1L.exprValue(targetType) else 0L.exprValue(targetType) type.isNumber -> return numberValue().exprValue(targetType) type.isText -> { val value = try { val normalized = stringValue().normalizeForCastToInt() valueFactory.ion.singleValue(normalized) as IonInt - } catch (e : Exception) { + } catch (e: Exception) { castFailedErr("can't convert string value to INT", internal = false, cause = e) } @@ -456,7 +457,7 @@ fun ExprValue.cast( type.isText -> try { return stringValue().toDouble().exprValue(targetType) - } catch(e: NumberFormatException) { + } catch (e: NumberFormatException) { castFailedErr("can't convert string value to FLOAT", internal = false, cause = e) } } @@ -469,9 +470,7 @@ fun ExprValue.cast( type.isNumber -> return numberValue().exprValue(targetType) type.isText -> try { return bigDecimalOf(stringValue()).exprValue(targetType) - } - catch (e: NumberFormatException) - { + } catch (e: NumberFormatException) { castFailedErr("can't convert string value to DECIMAL", internal = false, cause = e) } } @@ -490,16 +489,20 @@ fun ExprValue.cast( type.isText -> try { // validate that the date string follows the format YYYY-MM-DD if (!datePatternRegex.matches(stringValue())) { - castFailedErr("Can't convert string value to DATE. 
Expected valid date string " + - "and the date format to be YYYY-MM-DD", internal = false) + castFailedErr( + "Can't convert string value to DATE. Expected valid date string " + + "and the date format to be YYYY-MM-DD", + internal = false + ) } val date = LocalDate.parse(stringValue()) return valueFactory.newDate(date) - } - catch (e: DateTimeParseException) - { - castFailedErr("Can't convert string value to DATE. Expected valid date string " + - "and the date format to be YYYY-MM-DD", internal = false, cause = e) + } catch (e: DateTimeParseException) { + castFailedErr( + "Can't convert string value to DATE. Expected valid date string " + + "and the date format to be YYYY-MM-DD", + internal = false, cause = e + ) } } is TimeType -> { @@ -508,33 +511,36 @@ fun ExprValue.cast( type == ExprValueType.TIME -> { val time = timeValue() val timeZoneOffset = when (targetType.withTimeZone) { - true -> time.zoneOffset?: defaultTimezoneOffset + true -> time.zoneOffset ?: defaultTimezoneOffset else -> null } return valueFactory.newTime( Time.of( time.localTime, - precision?: time.precision, + precision ?: time.precision, timeZoneOffset - )) + ) + ) } type == ExprValueType.TIMESTAMP -> { val ts = timestampValue() val timeZoneOffset = when (targetType.withTimeZone) { - true -> ts.localOffset?: castFailedErr( + true -> ts.localOffset ?: castFailedErr( "Can't convert timestamp value with unknown local offset (i.e. -00:00) to TIME WITH TIME ZONE.", internal = false ) else -> null } - return valueFactory.newTime(Time.of( - ts.hour, - ts.minute, - ts.second, - (ts.decimalSecond.remainder(BigDecimal.ONE).multiply(NANOS_PER_SECOND.toBigDecimal())).toInt(), - precision?: ts.decimalSecond.scale(), - timeZoneOffset - )) + return valueFactory.newTime( + Time.of( + ts.hour, + ts.minute, + ts.second, + (ts.decimalSecond.remainder(BigDecimal.ONE).multiply(NANOS_PER_SECOND.toBigDecimal())).toInt(), + precision ?: ts.decimalSecond.scale(), + timeZoneOffset + ) + ) } type.isText -> try { // validate that the time string follows the format HH:MM:SS[.ddddd...][+|-HH:MM] @@ -556,16 +562,18 @@ fun ExprValue.cast( return valueFactory.newTime( Time.of( localTime, - precision?: getPrecisionFromTimeString(stringValue()), + precision ?: getPrecisionFromTimeString(stringValue()), when (targetType.withTimeZone) { true -> zoneOffset else -> null } - )) + ) + ) } catch (e: DateTimeParseException) { castFailedErr( "Can't convert string value to TIME. 
Expected valid time string " + - "and the time format to be HH:MM:SS[.ddddd...][+|-HH:MM]", internal = false, cause = e + "and the time format to be HH:MM:SS[.ddddd...][+|-HH:MM]", + internal = false, cause = e ) } } @@ -583,9 +591,9 @@ fun ExprValue.cast( is BlobType -> when { type.isLob -> return valueFactory.newBlob(bytesValue()) } - is ListType -> if(type.isSequence) return valueFactory.newList(asSequence()) - is SexpType -> if(type.isSequence) return valueFactory.newSexp(asSequence()) - is BagType -> if(type.isSequence) return valueFactory.newBag(asSequence()) + is ListType -> if (type.isSequence) return valueFactory.newList(asSequence()) + is SexpType -> if (type.isSequence) return valueFactory.newSexp(asSequence()) + is BagType -> if (type.isSequence) return valueFactory.newBag(asSequence()) // no support for anything else else -> {} } @@ -620,22 +628,21 @@ private fun String.normalizeForCastToInt(): String { } fun String.possiblyHexOrBase2() = (length >= 2 && this[1].isHexOrBase2Marker()) || - (length >= 3 && this[0].isSign() && this[2].isHexOrBase2Marker()) + (length >= 3 && this[0].isSign() && this[2].isHexOrBase2Marker()) return when { - length == 0 -> this + length == 0 -> this possiblyHexOrBase2() -> { if (this[0] == '+') { this.drop(1) - } - else { + } else { this } } - else -> { + else -> { val (isNegative, startIndex) = when (this[0]) { - '-' -> Pair(true, 1) - '+' -> Pair(false, 1) + '-' -> Pair(true, 1) + '+' -> Pair(false, 1) else -> Pair(false, 0) } @@ -645,11 +652,11 @@ private fun String.normalizeForCastToInt(): String { } when { - toDrop == length -> "0" // string is all zeros - toDrop == 0 -> this + toDrop == length -> "0" // string is all zeros + toDrop == 0 -> this toDrop == 1 && isNegative -> this - toDrop > 1 && isNegative -> '-' + this.drop(toDrop) - else -> this.drop(toDrop) + toDrop > 1 && isNegative -> '-' + this.drop(toDrop) + else -> this.drop(toDrop) } } } @@ -658,11 +665,11 @@ private fun String.normalizeForCastToInt(): String { /** * An Unknown value is one of `MISSING` or `NULL` */ -internal fun ExprValue.isUnknown() : Boolean = this.type.isUnknown +internal fun ExprValue.isUnknown(): Boolean = this.type.isUnknown /** * The opposite of [isUnknown]. */ -internal fun ExprValue.isNotUnknown() : Boolean = !this.type.isUnknown +internal fun ExprValue.isNotUnknown(): Boolean = !this.type.isUnknown /** * Creates a filter for unique ExprValues consistent with exprEquals. This filter is stateful keeping track of diff --git a/lang/src/org/partiql/lang/eval/ExprValueFactory.kt b/lang/src/org/partiql/lang/eval/ExprValueFactory.kt index a4bb882688..ba3e6594c7 100644 --- a/lang/src/org/partiql/lang/eval/ExprValueFactory.kt +++ b/lang/src/org/partiql/lang/eval/ExprValueFactory.kt @@ -117,7 +117,7 @@ interface ExprValueFactory { fun newTime(value: Time): ExprValue /** Returns an PartiQL `SYMBOL` [ExprValue] instance representing the specified [String]. */ - fun newSymbol(value: String) : ExprValue + fun newSymbol(value: String): ExprValue /** Returns a PartiQL `CLOB` [ExprValue] instance representing the specified [ByteArray]. 
*/ fun newClob(value: ByteArray): ExprValue @@ -194,15 +194,15 @@ private class ExprValueFactoryImpl(override val ion: IonSystem) : ExprValueFacto override val emptyBag = newBag(sequenceOf()) override fun newBoolean(value: Boolean): ExprValue = - if(value) trueValue else falseValue + if (value) trueValue else falseValue override fun newString(value: String): ExprValue = when { value.isEmpty() -> emptyString - else -> StringExprValue(ion, value) + else -> StringExprValue(ion, value) } - override fun newInt(value: Int):ExprValue = IntExprValue(ion, value.toLong()) + override fun newInt(value: Int): ExprValue = IntExprValue(ion, value.toLong()) override fun newInt(value: Long) = IntExprValue(ion, value) @@ -253,7 +253,7 @@ private class ExprValueFactoryImpl(override val ion: IonSystem) : ExprValueFacto StructExprValue(ion, ordering, value) override fun newStruct(value: Iterable, ordering: StructOrdering): ExprValue = - newStruct(value.asSequence(), ordering) + newStruct(value.asSequence(), ordering) override fun newBag(value: Sequence): ExprValue = SequenceExprValue(ion, ExprValueType.BAG, value) @@ -261,24 +261,24 @@ private class ExprValueFactoryImpl(override val ion: IonSystem) : ExprValueFacto override fun newBag(value: Iterable): ExprValue = newBag(value.asSequence()) override fun newList(value: Sequence): ExprValue = - SequenceExprValue(ion, ExprValueType.LIST, value.mapIndexed { i, v -> v.namedValue(newInt(i))}) + SequenceExprValue(ion, ExprValueType.LIST, value.mapIndexed { i, v -> v.namedValue(newInt(i)) }) override fun newList(value: Iterable): ExprValue = newList(value.asSequence()) override fun newSexp(value: Sequence): ExprValue = - SequenceExprValue(ion, ExprValueType.SEXP, value.mapIndexed { i, v -> v.namedValue(newInt(i))}) + SequenceExprValue(ion, ExprValueType.SEXP, value.mapIndexed { i, v -> v.namedValue(newInt(i)) }) override fun newSexp(value: Iterable): ExprValue = newSexp(value.asSequence()) } /** A base class for the `NULL` value, intended to be memoized. */ -private class NullExprValue(value: IonNull): BaseExprValue() { +private class NullExprValue(value: IonNull) : BaseExprValue() { override val ionValue = value override val type: ExprValueType get() = ExprValueType.NULL } /** A base class for the `MISSING` value, intended to be memoized. */ -private class MissingExprValue(value: IonNull): BaseExprValue() { +private class MissingExprValue(value: IonNull) : BaseExprValue() { override val ionValue = value.also { if (!it.hasTypeAnnotation(MISSING_ANNOTATION)) { it.addTypeAnnotation(MISSING_ANNOTATION) @@ -289,7 +289,7 @@ private class MissingExprValue(value: IonNull): BaseExprValue() { /** An ExprValue class just for boolean values. [value] holds a memoized instance of [IonBool]. */ -private abstract class BooleanExprValue(value: IonBool): BaseExprValue(), Scalar { +private abstract class BooleanExprValue(value: IonBool) : BaseExprValue(), Scalar { override val scalar: Scalar get() = this @@ -317,29 +317,29 @@ private class TrueBoolExprValue(val value: IonBool) : BooleanExprValue(value) { } /** A base class for the `false` boolean value, intended to be memoized. */ -private class FalseBoolExprValue(val value: IonBool): BooleanExprValue(value) { +private class FalseBoolExprValue(val value: IonBool) : BooleanExprValue(value) { override fun booleanValue(): Boolean? 
= false } -private class StringExprValue(val ion: IonSystem, val value: String): ScalarExprValue() { +private class StringExprValue(val ion: IonSystem, val value: String) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.STRING override fun stringValue() = value override fun ionValueFun(): IonValue = ion.newString(value) } -private class IntExprValue(val ion: IonSystem, val value: Long): ScalarExprValue() { +private class IntExprValue(val ion: IonSystem, val value: Long) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.INT override fun numberValue() = value override fun ionValueFun(): IonValue = ion.newInt(value) } -private class FloatExprValue(val ion: IonSystem, val value: Double): ScalarExprValue() { +private class FloatExprValue(val ion: IonSystem, val value: Double) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.FLOAT override fun numberValue() = value override fun ionValueFun(): IonValue = ion.newFloat(value) } -private class DecimalExprValue(val ion: IonSystem, val value: BigDecimal): ScalarExprValue() { +private class DecimalExprValue(val ion: IonSystem, val value: BigDecimal) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.DECIMAL override fun numberValue() = value override fun ionValueFun(): IonValue = ion.newDecimal(value) @@ -349,15 +349,17 @@ private class DecimalExprValue(val ion: IonSystem, val value: BigDecimal): Scala * [ExprValue] to represent DATE in PartiQL. * [LocalDate] represents date without time and time zone. */ -private class DateExprValue(val ion: IonSystem, val value: LocalDate): ScalarExprValue() { +private class DateExprValue(val ion: IonSystem, val value: LocalDate) : ScalarExprValue() { init { // validate that the local date is not an extended date. if (value.year < 0 || value.year > 9999) { - err("Year should be in the range 0 to 9999 inclusive.", + err( + "Year should be in the range 0 to 9999 inclusive.", ErrorCode.EVALUATOR_DATE_FIELD_OUT_OF_RANGE, propertyValueMapOf(), - false) + false + ) } } private val PARTIQL_DATE_ANNOTATION = "\$partiql_date" @@ -372,31 +374,31 @@ private class DateExprValue(val ion: IonSystem, val value: LocalDate): ScalarExp override fun ionValueFun(): IonValue = createIonDate() } -private class TimestampExprValue(val ion: IonSystem, val value: Timestamp): ScalarExprValue() { +private class TimestampExprValue(val ion: IonSystem, val value: Timestamp) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.TIMESTAMP override fun timestampValue(): Timestamp? 
= value override fun ionValueFun(): IonValue = ion.newTimestamp(value) } -private class TimeExprValue(val ion: IonSystem, val value: Time): ScalarExprValue() { +private class TimeExprValue(val ion: IonSystem, val value: Time) : ScalarExprValue() { override val type = ExprValueType.TIME override fun timeValue(): Time = value override fun ionValueFun() = value.toIonValue(ion) } -private class SymbolExprValue(val ion: IonSystem, val value: String): ScalarExprValue() { +private class SymbolExprValue(val ion: IonSystem, val value: String) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.SYMBOL override fun stringValue() = value override fun ionValueFun(): IonValue = ion.newSymbol(value) } -private class ClobExprValue(val ion: IonSystem, val value: ByteArray): ScalarExprValue() { +private class ClobExprValue(val ion: IonSystem, val value: ByteArray) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.CLOB override fun bytesValue() = value override fun ionValueFun(): IonValue = ion.newClob(value) } -private class BlobExprValue(val ion: IonSystem, val value: ByteArray): ScalarExprValue() { +private class BlobExprValue(val ion: IonSystem, val value: ByteArray) : ScalarExprValue() { override val type: ExprValueType = ExprValueType.BLOB override fun bytesValue() = value override fun ionValueFun(): IonValue = ion.newBlob(value) @@ -408,16 +410,16 @@ private class BlobExprValue(val ion: IonSystem, val value: ByteArray): ScalarExp internal class IonExprValue(private val valueFactory: ExprValueFactory, override val ionValue: IonValue) : BaseExprValue() { init { - if(valueFactory.ion !== ionValue.system) { + if (valueFactory.ion !== ionValue.system) { throw IllegalArgumentException("valueFactory must have the same instance of IonSystem as ionValue") } } private val namedFacet: Named? = when { ionValue.fieldName != null -> valueFactory.newString(ionValue.fieldName).asNamed() - ionValue.type != IonType.DATAGRAM - && ionValue.container != null - && ionValue.ordinal >= 0 -> valueFactory.newInt(ionValue.ordinal).asNamed() + ionValue.type != IonType.DATAGRAM && + ionValue.container != null && + ionValue.ordinal >= 0 -> valueFactory.newInt(ionValue.ordinal).asNamed() else -> null } @@ -431,7 +433,7 @@ internal class IonExprValue(private val valueFactory: ExprValueFactory, override override val scalar: Scalar by lazy { object : Scalar { override fun booleanValue(): Boolean? = ionValue.booleanValueOrNull() - override fun numberValue(): Number? = ionValue.numberValueOrNull() + override fun numberValue(): Number? = ionValue.numberValueOrNull() override fun timestampValue(): Timestamp? = ionValue.timestampValueOrNull() override fun stringValue(): String? = ionValue.stringValueOrNull() override fun bytesValue(): ByteArray? = ionValue.bytesValueOrNull() @@ -441,8 +443,7 @@ internal class IonExprValue(private val valueFactory: ExprValueFactory, override override val bindings by lazy { if (ionValue is IonStruct) { IonStructBindings(valueFactory, ionValue) - } - else { + } else { Bindings.empty() } } @@ -471,7 +472,7 @@ internal class IonExprValue(private val valueFactory: ExprValueFactory, override } @Suppress("UNCHECKED_CAST") - override fun provideFacet(type: Class?) = when(type) { + override fun provideFacet(type: Class?) = when (type) { Named::class.java -> namedFacet else -> null } as T? @@ -489,10 +490,11 @@ internal class IonExprValue(private val valueFactory: ExprValueFactory, override * @param type The reported [ExprValueType] for this value. 
* @param sequence The [Sequence] generating function. */ -internal class SequenceExprValue( //dl todo: make private again +internal class SequenceExprValue( // dl todo: make private again private val ion: IonSystem, override val type: ExprValueType, - private val sequence: Sequence) : BaseExprValue() { + private val sequence: Sequence +) : BaseExprValue() { init { if (!type.isSequence) { @@ -505,17 +507,17 @@ internal class SequenceExprValue( //dl todo: make private again .mapTo( when (type) { // dont add annotation if already present. - ExprValueType.BAG -> ion.newEmptyList().also { + ExprValueType.BAG -> ion.newEmptyList().also { if (!it.hasTypeAnnotation(BAG_ANNOTATION)) { it.addTypeAnnotation(BAG_ANNOTATION) } } ExprValueType.LIST -> ion.newEmptyList() - ExprValueType.SEXP -> ion.newEmptySexp() - else -> throw IllegalStateException("Invalid type: $type") + ExprValueType.SEXP -> ion.newEmptySexp() + else -> throw IllegalStateException("Invalid type: $type") } ) { - if(it is StructExprValue) + if (it is StructExprValue) it.createMutableValue() else it.ionValue.clone() @@ -527,7 +529,7 @@ internal class SequenceExprValue( //dl todo: make private again when (type) { // no ordinal access over BAG ExprValueType.BAG -> OrdinalBindings.EMPTY - else -> { + else -> { // materialize the sequence as a backing list OrdinalBindings.ofList(toList()) } diff --git a/lang/src/org/partiql/lang/eval/ExprValueType.kt b/lang/src/org/partiql/lang/eval/ExprValueType.kt index e1d46f225e..ec273ab59e 100644 --- a/lang/src/org/partiql/lang/eval/ExprValueType.kt +++ b/lang/src/org/partiql/lang/eval/ExprValueType.kt @@ -27,13 +27,15 @@ import org.partiql.lang.syntax.TYPE_ALIASES * @param typeNames The normalized type names and aliases associated with the runtime type. * @param isRangedFrom Whether or not the `FROM` clause uses the value's iterator directly. */ -enum class ExprValueType(val typeNames: List, - val isUnknown: Boolean = false, - val isNumber: Boolean = false, - val isText: Boolean = false, - val isLob: Boolean = false, - val isSequence: Boolean = false, - val isRangedFrom: Boolean = false) { +enum class ExprValueType( + val typeNames: List, + val isUnknown: Boolean = false, + val isNumber: Boolean = false, + val isText: Boolean = false, + val isLob: Boolean = false, + val isSequence: Boolean = false, + val isRangedFrom: Boolean = false +) { MISSING( typeNames = listOf("missing"), isUnknown = true @@ -46,8 +48,10 @@ enum class ExprValueType(val typeNames: List, typeNames = listOf("bool", "boolean") ), INT( - typeNames = listOf("int", "smallint", "integer2", "int2", "integer", "integer4", "int4", "integer8", "int8", - "bigint"), + typeNames = listOf( + "int", "smallint", "integer2", "int2", "integer", "integer4", "int4", "integer8", "int8", + "bigint" + ), isNumber = true ), FLOAT( @@ -101,7 +105,6 @@ enum class ExprValueType(val typeNames: List, isRangedFrom = true ); - @Deprecated("Please use isUnknown instead", ReplaceWith("isUnknown")) fun isNull() = isUnknown @@ -110,10 +113,10 @@ enum class ExprValueType(val typeNames: List, /** Whether or not the given type is in the same type grouping as another. 
*/ fun isDirectlyComparableTo(other: ExprValueType): Boolean = - (this == other) - || (isNumber && other.isNumber) - || (isText && other.isText) - || (isLob && other.isLob) + (this == other) || + (isNumber && other.isNumber) || + (isText && other.isText) || + (isLob && other.isLob) companion object { init { @@ -169,12 +172,12 @@ enum class ExprValueType(val typeNames: List, }.toTypedArray() ) - fun fromTypeName(name: String): ExprValueType = LEX_TYPE_MAP[name] ?: throw EvaluationException( "No such value type for $name", ErrorCode.LEXER_INVALID_NAME, - internal = true) + internal = true + ) fun fromSqlDataType(sqlDataType: PartiqlAst.Type) = fromSqlDataTypeOrNull(sqlDataType) ?: throw EvaluationException( @@ -233,4 +236,4 @@ enum class ExprValueType(val typeNames: List, is PartiqlAst.Type.SparkShort -> error("$this node should not be present in PartiQLAST. Consider transforming the AST using CustomTypeVisitorTransform.") } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/Group.kt b/lang/src/org/partiql/lang/eval/Group.kt index f095031aba..9714c98ddd 100644 --- a/lang/src/org/partiql/lang/eval/Group.kt +++ b/lang/src/org/partiql/lang/eval/Group.kt @@ -19,4 +19,4 @@ package org.partiql.lang.eval */ class Group(val key: ExprValue, val registers: RegisterBank) { val groupValues: MutableList = ArrayList() -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt b/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt index 95455ce45a..0185a14a41 100644 --- a/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt +++ b/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt @@ -17,7 +17,6 @@ package org.partiql.lang.eval import com.amazon.ion.IonSystem import org.partiql.lang.eval.visitors.GroupByItemAliasVisitorTransform - /** * This is a special [ExprValue] just for group keys. * @@ -25,16 +24,16 @@ import org.partiql.lang.eval.visitors.GroupByItemAliasVisitorTransform * group by expressions. See [GroupByItemAliasVisitorTransform] and other uses of * [org.partiql.lang.ast.UniqueNameMeta]. */ -internal class GroupKeyExprValue(ion: IonSystem, sequence: Sequence, private val uniqueNames: Map) - : StructExprValue(ion, StructOrdering.UNORDERED, sequence) { +internal class GroupKeyExprValue(ion: IonSystem, sequence: Sequence, private val uniqueNames: Map) : + StructExprValue(ion, StructOrdering.UNORDERED, sequence) { private val keyBindings by lazy { when { uniqueNames.any() -> Bindings.ofMap(uniqueNames).delegate(super.bindings) - else -> super.bindings + else -> super.bindings } } override val bindings: Bindings get() = keyBindings -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/IonStructBindings.kt b/lang/src/org/partiql/lang/eval/IonStructBindings.kt index fc59271d2e..d9612e9458 100644 --- a/lang/src/org/partiql/lang/eval/IonStructBindings.kt +++ b/lang/src/org/partiql/lang/eval/IonStructBindings.kt @@ -56,16 +56,15 @@ internal class IonStructBindings(private val valueFactory: ExprValueFactory, pri private fun handleMatches(entries: List, fieldName: String): IonValue? = when (entries.size) { - 0 -> null - 1 -> entries[0] + 0 -> null + 1 -> entries[0] else -> errAmbiguousBinding(fieldName, entries.map { it.fieldName }) } - override operator fun get(bindingName: BindingName): ExprValue? 
= when (bindingName.bindingCase) { - BindingCase.SENSITIVE -> caseSensitiveLookup(bindingName.name) + BindingCase.SENSITIVE -> caseSensitiveLookup(bindingName.name) BindingCase.INSENSITIVE -> caseInsensitiveLookup(bindingName.name) }?.let { valueFactory.newFromIonValue(it) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/OrdinalBindings.kt b/lang/src/org/partiql/lang/eval/OrdinalBindings.kt index fe2ae976e2..e926ae9d6d 100644 --- a/lang/src/org/partiql/lang/eval/OrdinalBindings.kt +++ b/lang/src/org/partiql/lang/eval/OrdinalBindings.kt @@ -44,7 +44,6 @@ interface OrdinalBindings { } } } - } /** diff --git a/lang/src/org/partiql/lang/eval/PartiqlAstExtensions.kt b/lang/src/org/partiql/lang/eval/PartiqlAstExtensions.kt index e4e092e673..ef9d73e919 100644 --- a/lang/src/org/partiql/lang/eval/PartiqlAstExtensions.kt +++ b/lang/src/org/partiql/lang/eval/PartiqlAstExtensions.kt @@ -6,7 +6,6 @@ import org.partiql.lang.ast.SourceLocationMeta import org.partiql.lang.ast.sourceLocation import org.partiql.lang.domains.PartiqlAst - /** * Determines an appropriate column name for the given [PartiqlAst.Expr]. * @@ -57,8 +56,10 @@ internal fun PartiqlAst.Expr.getStartingSourceLocationMeta(): SourceLocationMeta override fun visitMetas(node: MetaContainer, accumulator: SourceLocationMeta): SourceLocationMeta { val nodeSourceLocation = node.sourceLocation return nodeSourceLocation?.takeIf { - (nodeSourceLocation.lineNum < accumulator.lineNum || - (nodeSourceLocation.lineNum == accumulator.lineNum && nodeSourceLocation.charOffset < accumulator.charOffset)) + ( + nodeSourceLocation.lineNum < accumulator.lineNum || + (nodeSourceLocation.lineNum == accumulator.lineNum && nodeSourceLocation.charOffset < accumulator.charOffset) + ) } ?: accumulator } } diff --git a/lang/src/org/partiql/lang/eval/StructExprValue.kt b/lang/src/org/partiql/lang/eval/StructExprValue.kt index 1d95de8463..83ca6e7261 100644 --- a/lang/src/org/partiql/lang/eval/StructExprValue.kt +++ b/lang/src/org/partiql/lang/eval/StructExprValue.kt @@ -60,9 +60,11 @@ internal open class StructExprValue( } /** The backing data structured for operations that require materialization. */ - private data class Materialized(val bindings: Bindings, - val ordinalBindings: OrdinalBindings, - val orderedBindNames: OrderedBindNames?) + private data class Materialized( + val bindings: Bindings, + val ordinalBindings: OrdinalBindings, + val orderedBindNames: OrderedBindNames? + ) private val materialized by lazy { val bindMap = HashMap() @@ -79,7 +81,7 @@ internal open class StructExprValue( val bindings = Bindings.ofMap(bindMap) val ordinalBindings = OrdinalBindings.ofList(bindList) - val orderedBindNames = when(ordering) { + val orderedBindNames = when (ordering) { StructOrdering.ORDERED -> object : OrderedBindNames { override val orderedNames = bindNames } @@ -97,7 +99,7 @@ internal open class StructExprValue( @Suppress("UNCHECKED_CAST") override fun provideFacet(type: Class?): T? = when (type) { - OrderedBindNames::class.java -> when(ordering){ + OrderedBindNames::class.java -> when (ordering) { StructOrdering.ORDERED -> materialized.orderedBindNames else -> null } as T? 
@@ -106,4 +108,3 @@ internal open class StructExprValue( override fun iterator() = sequence.iterator() } - diff --git a/lang/src/org/partiql/lang/eval/Thunk.kt b/lang/src/org/partiql/lang/eval/Thunk.kt index 1b1855564c..339e64f854 100644 --- a/lang/src/org/partiql/lang/eval/Thunk.kt +++ b/lang/src/org/partiql/lang/eval/Thunk.kt @@ -22,7 +22,6 @@ import org.partiql.lang.errors.ErrorBehaviorInPermissiveMode import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property - /** * A thunk with no parameters other than the current environment. * @@ -88,9 +87,9 @@ data class ThunkOptions private constructor( */ class Builder { private var options = ThunkOptions() - fun handleExceptionForLegacyMode(value: ThunkExceptionHandlerForLegacyMode) = set { copy(handleExceptionForLegacyMode = value)} + fun handleExceptionForLegacyMode(value: ThunkExceptionHandlerForLegacyMode) = set { copy(handleExceptionForLegacyMode = value) } fun handleExceptionForPermissiveMode(value: ThunkExceptionHandlerForPermissiveMode) = set { copy(handleExceptionForPermissiveMode = value) } - private inline fun set(block: ThunkOptions.() -> ThunkOptions) : Builder { + private inline fun set(block: ThunkOptions.() -> ThunkOptions): Builder { options = block(options) return this } @@ -103,10 +102,11 @@ val DEFAULT_EXCEPTION_HANDLER_FOR_LEGACY_MODE: ThunkExceptionHandlerForLegacyMod val message = e.message ?: "" throw EvaluationException( "Internal error, $message", - errorCode = (e as? EvaluationException)?.errorCode?: ErrorCode.EVALUATOR_GENERIC_EXCEPTION, + errorCode = (e as? EvaluationException)?.errorCode ?: ErrorCode.EVALUATOR_GENERIC_EXCEPTION, errorContext = errorContextFrom(sourceLocation), cause = e, - internal = true) + internal = true + ) } val DEFAULT_EXCEPTION_HANDLER_FOR_PERMISSIVE_MODE: ThunkExceptionHandlerForPermissiveMode = { _, _ -> } @@ -119,7 +119,7 @@ val DEFAULT_EXCEPTION_HANDLER_FOR_PERMISSIVE_MODE: ThunkExceptionHandlerForPermi internal fun TypingMode.createThunkFactory( compileOptions: CompileOptions, valueFactory: ExprValueFactory -) : ThunkFactory = when(this) { +): ThunkFactory = when (this) { TypingMode.LEGACY -> LegacyThunkFactory(compileOptions, valueFactory) TypingMode.PERMISSIVE -> PermissiveThunkFactory(compileOptions, valueFactory) } @@ -280,8 +280,8 @@ internal abstract class ThunkFactory( ): ThunkEnv = this.thunkEnv(metas) { env -> propagateUnknowns({ t1(env) }, { t2(env) }, null) { v1, v2, _ -> - compute(env, v1, v2!!) - } + compute(env, v1, v2!!) + } }.typeCheck(metas) /** See the [thunkEnvOperands] with three [ThunkEnv] operands. */ @@ -292,8 +292,8 @@ internal abstract class ThunkFactory( ): ThunkEnv = this.thunkEnv(metas) { env -> propagateUnknowns({ t1(env) }, null, null) { v1, _, _ -> - compute(env, v1) - } + compute(env, v1) + } }.typeCheck(metas) /** See the [thunkEnvOperands] with a variadic list of [ThunkEnv] operands. */ @@ -486,7 +486,7 @@ internal class LegacyThunkFactory( return thunkEnv(metas) thunkBlock@{ env -> val firstValue = firstThunk(env) when { - //If the first value is unknown, short circuit returning null. + // If the first value is unknown, short circuit returning null. firstValue.isUnknown() -> valueFactory.nullValue else -> { otherThunks.fold(firstValue) { lastValue, currentThunk -> @@ -506,7 +506,6 @@ internal class LegacyThunkFactory( valueFactory.newBoolean(true) } - } } } @@ -521,33 +520,33 @@ internal class LegacyThunkFactory( * with the original exception as the cause. 
*/ override fun handleException( - sourceLocation: SourceLocationMeta?, - block: () -> ExprValue + sourceLocation: SourceLocationMeta?, + block: () -> ExprValue ): ExprValue = - try { - block() - } catch (e: EvaluationException) { - when { - e.errorContext == null -> - throw EvaluationException( - message = e.message, - errorCode = e.errorCode, - errorContext = errorContextFrom(sourceLocation), - cause = e, - internal = e.internal) - else -> { - // Only add source location data to the error context if it doesn't already exist - // in [errorContext]. - if (!e.errorContext.hasProperty(Property.LINE_NUMBER)) { - sourceLocation?.let { fillErrorContext(e.errorContext, sourceLocation) } - } - throw e + try { + block() + } catch (e: EvaluationException) { + when { + e.errorContext == null -> + throw EvaluationException( + message = e.message, + errorCode = e.errorCode, + errorContext = errorContextFrom(sourceLocation), + cause = e, + internal = e.internal + ) + else -> { + // Only add source location data to the error context if it doesn't already exist + // in [errorContext]. + if (!e.errorContext.hasProperty(Property.LINE_NUMBER)) { + sourceLocation?.let { fillErrorContext(e.errorContext, sourceLocation) } } + throw e } - } catch (e: Exception) { - compileOptions.thunkOptions.handleExceptionForLegacyMode(e, sourceLocation) } - + } catch (e: Exception) { + compileOptions.thunkOptions.handleExceptionForLegacyMode(e, sourceLocation) + } } /** @@ -614,13 +613,12 @@ internal class PermissiveThunkFactory( compute: (ExprValue, ExprValue?, ExprValue?) -> ExprValue ): ExprValue = when { - v1.type == ExprValueType.NULL - || (v2?.let { it.type == ExprValueType.NULL }) ?: false - || (v3?.let { it.type == ExprValueType.NULL }) ?: false -> valueFactory.nullValue + v1.type == ExprValueType.NULL || + (v2?.let { it.type == ExprValueType.NULL }) ?: false || + (v3?.let { it.type == ExprValueType.NULL }) ?: false -> valueFactory.nullValue else -> compute(v1, v2, v3) } - /** See [ThunkFactory.thunkFold]. */ override fun thunkFold( metas: MetaContainer, diff --git a/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt b/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt index 4e05e297f5..8f48dbe68a 100644 --- a/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt +++ b/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt @@ -28,7 +28,7 @@ import org.partiql.lang.util.errAmbiguousBinding * Think of this as a factory which precomputes the name-bindings map for a list of locals. */ abstract class LocalsBinder { - fun bindLocals(locals: List) : Bindings { + fun bindLocals(locals: List): Bindings { return object : Bindings { override fun get(bindingName: BindingName): ExprValue? = binderForName(bindingName)(locals) } @@ -65,18 +65,19 @@ fun List.localsBinder(missingValue: ExprValue): LocalsBinder { // the alias binds to the name of the value when { alias.atName == null -> null - else -> Binder(alias.atName) { it[index].name ?: missingValue } + else -> Binder(alias.atName) { it[index].name ?: missingValue } }, when { alias.byName == null -> null - else -> Binder(alias.byName) { it[index].address ?: missingValue } - }) - }.asSequence() + else -> Binder(alias.byName) { it[index].address ?: missingValue } + } + ) + }.asSequence() .flatten() .filterNotNull() // There may be multiple accessors per name. 
// Squash the accessor list to either the sole element or an error function - .groupBy { keyMangler(it.name) } + .groupBy { keyMangler(it.name) } .mapValues { (name, binders) -> when (binders.size) { 1 -> binders[0].func @@ -94,22 +95,25 @@ fun List.localsBinder(missingValue: ExprValue): LocalsBinder { val dynamicLocalsBinder: (BindingName) -> (List) -> ExprValue? = when (this.count()) { 0 -> { _ -> { _ -> null } } 1 -> { name -> { locals -> locals.first().bindings[name] } } - else -> { name -> { locals -> locals.asSequence() - .map { it.bindings[name] } - .filterNotNull() - .firstOrNull() - }} + else -> { name -> + { locals -> + locals.asSequence() + .map { it.bindings[name] } + .filterNotNull() + .firstOrNull() + } + } } // Compile case-[in]sensitive bindings and return the accessor - return object: LocalsBinder() { + return object : LocalsBinder() { val caseSensitiveBindings = compileBindings() val caseInsensitiveBindings = compileBindings { it.toLowerCase() } override fun binderForName(bindingName: BindingName): (List) -> ExprValue? { return when (bindingName.bindingCase) { - BindingCase.INSENSITIVE -> caseInsensitiveBindings[bindingName.name.toLowerCase()] - BindingCase.SENSITIVE -> caseSensitiveBindings[bindingName.name] - } ?: dynamicLocalsBinder(bindingName) + BindingCase.INSENSITIVE -> caseInsensitiveBindings[bindingName.name.toLowerCase()] + BindingCase.SENSITIVE -> caseSensitiveBindings[bindingName.name] + } ?: dynamicLocalsBinder(bindingName) } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt b/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt index 3d37daf2d5..057b4e8d61 100644 --- a/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt +++ b/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt @@ -51,8 +51,8 @@ internal fun createBuiltinFunctions(valueFactory: ExprValueFactory) = ToTimestampExprFunction(valueFactory), SizeExprFunction(valueFactory), FromUnixTimeFunction(valueFactory), - UnixTimestampFunction(valueFactory)) - + UnixTimestampFunction(valueFactory) + ) internal fun createExists(valueFactory: ExprValueFactory): ExprFunction = object : ExprFunction { override val signature = FunctionSignature( diff --git a/lang/src/org/partiql/lang/eval/builtins/Constants.kt b/lang/src/org/partiql/lang/eval/builtins/Constants.kt index 0b85043ce0..2b1298f591 100644 --- a/lang/src/org/partiql/lang/eval/builtins/Constants.kt +++ b/lang/src/org/partiql/lang/eval/builtins/Constants.kt @@ -16,24 +16,24 @@ package org.partiql.lang.eval.builtins internal val TIMESTAMP_FORMAT_SYMBOLS: Set = setOf( - 'y', //Year of era, e.g. "1978"; "78" - 'M', //Month of year (1-12) - 'L', //Month of year e.g. "Jan"; "January" - 'd', //day of month (1-31) - 'a', //am-pm of day - 'h', //Clock hour of am-pm (1-12) - 'H', //hour of day (0-23) - 'm', //Minute of hour (0-59) - 's', //Second of minute (0-59) + 'y', // Year of era, e.g. "1978"; "78" + 'M', // Month of year (1-12) + 'L', // Month of year e.g. "Jan"; "January" + 'd', // day of month (1-31) + 'a', // am-pm of day + 'h', // Clock hour of am-pm (1-12) + 'H', // hour of day (0-23) + 'm', // Minute of hour (0-59) + 's', // Second of minute (0-59) - //Note: S and n both use ChronoField.NANO_OF_SECOND so we cannot remove support for one without - //removing support for the other AFAIK. 
- 'S', //fraction of second, in milliseconds (0-999) - 'n', //Nano of second (0-999,999,999) + // Note: S and n both use ChronoField.NANO_OF_SECOND so we cannot remove support for one without + // removing support for the other AFAIK. + 'S', // fraction of second, in milliseconds (0-999) + 'n', // Nano of second (0-999,999,999) - //Note: Same with X, x O and Z for ChronoField.OFFSET_SECONDS - 'X', //Zone offset or Z for zero: e.g. "-08", "-0830", "-08:30", "-083000", "-08:30:00" Note: the seconds portion will always be "00" because Ion-Timestamp offset is specified in minutes - 'x', //Zone offset "+0000", "-08", "-0830", "-08:30", "-083000", "-08:30:00" Note: the seconds portion will always be "00" because Ion-Timestamp offset is specified in minutes - 'O', //Localized zone offset, e.g. "GMT+8", "GMT+08:00", "UTC-08:00"; - 'Z' //4 digit zone offset, e.g "+0000", "-0800", "-08:00" + // Note: Same with X, x O and Z for ChronoField.OFFSET_SECONDS + 'X', // Zone offset or Z for zero: e.g. "-08", "-0830", "-08:30", "-083000", "-08:30:00" Note: the seconds portion will always be "00" because Ion-Timestamp offset is specified in minutes + 'x', // Zone offset "+0000", "-08", "-0830", "-08:30", "-083000", "-08:30:00" Note: the seconds portion will always be "00" because Ion-Timestamp offset is specified in minutes + 'O', // Localized zone offset, e.g. "GMT+8", "GMT+08:00", "UTC-08:00"; + 'Z' // 4 digit zone offset, e.g "+0000", "-0800", "-08:00" ) diff --git a/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt index 301e39cb7f..d896105606 100644 --- a/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt @@ -38,17 +38,21 @@ internal class DateAddExprFunction(val valueFactory: ExprValueFactory) : ExprFun ) companion object { - @JvmStatic private val precisionOrder = listOf(Timestamp.Precision.YEAR, - Timestamp.Precision.MONTH, - Timestamp.Precision.DAY, - Timestamp.Precision.MINUTE, - Timestamp.Precision.SECOND) - @JvmStatic private val dateTimePartToPrecision = mapOf(DateTimePart.YEAR to Precision.YEAR, - DateTimePart.MONTH to Precision.MONTH, - DateTimePart.DAY to Precision.DAY, - DateTimePart.HOUR to Precision.MINUTE, - DateTimePart.MINUTE to Precision.MINUTE, - DateTimePart.SECOND to Precision.SECOND) + @JvmStatic private val precisionOrder = listOf( + Timestamp.Precision.YEAR, + Timestamp.Precision.MONTH, + Timestamp.Precision.DAY, + Timestamp.Precision.MINUTE, + Timestamp.Precision.SECOND + ) + @JvmStatic private val dateTimePartToPrecision = mapOf( + DateTimePart.YEAR to Precision.YEAR, + DateTimePart.MONTH to Precision.MONTH, + DateTimePart.DAY to Precision.DAY, + DateTimePart.HOUR to Precision.MINUTE, + DateTimePart.MINUTE to Precision.MINUTE, + DateTimePart.SECOND to Precision.SECOND + ) } private fun Timestamp.hasSufficientPrecisionFor(requiredPrecision: Timestamp.Precision): Boolean { @@ -66,25 +70,31 @@ internal class DateAddExprFunction(val valueFactory: ExprValueFactory) : ExprFun } return when (requiredPrecision) { - Timestamp.Precision.YEAR -> Timestamp.forYear(this.year) - Timestamp.Precision.MONTH -> Timestamp.forMonth(this.year, this.month) - Timestamp.Precision.DAY -> Timestamp.forDay(this.year, this.month, this.day) - Timestamp.Precision.SECOND -> Timestamp.forSecond(this.year, - this.month, - this.day, - this.hour, - this.minute, - this.second, - this.localOffset) - Timestamp.Precision.MINUTE -> 
Timestamp.forMinute(this.year, - this.month, - this.day, - this.hour, - this.minute, - this.localOffset) - else -> errNoContext("invalid datetime part for date_add: ${dateTimePart.toString().toLowerCase()}", - errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, - internal = false) + Timestamp.Precision.YEAR -> Timestamp.forYear(this.year) + Timestamp.Precision.MONTH -> Timestamp.forMonth(this.year, this.month) + Timestamp.Precision.DAY -> Timestamp.forDay(this.year, this.month, this.day) + Timestamp.Precision.SECOND -> Timestamp.forSecond( + this.year, + this.month, + this.day, + this.hour, + this.minute, + this.second, + this.localOffset + ) + Timestamp.Precision.MINUTE -> Timestamp.forMinute( + this.year, + this.month, + this.day, + this.hour, + this.minute, + this.localOffset + ) + else -> errNoContext( + "invalid datetime part for date_add: ${dateTimePart.toString().toLowerCase()}", + errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, + internal = false + ) } } @@ -95,15 +105,17 @@ internal class DateAddExprFunction(val valueFactory: ExprValueFactory) : ExprFun try { val addedTimestamp = when (dateTimePart) { - DateTimePart.YEAR -> timestamp.adjustPrecisionTo(dateTimePart).addYear(interval) - DateTimePart.MONTH -> timestamp.adjustPrecisionTo(dateTimePart).addMonth(interval) - DateTimePart.DAY -> timestamp.adjustPrecisionTo(dateTimePart).addDay(interval) - DateTimePart.HOUR -> timestamp.adjustPrecisionTo(dateTimePart).addHour(interval) + DateTimePart.YEAR -> timestamp.adjustPrecisionTo(dateTimePart).addYear(interval) + DateTimePart.MONTH -> timestamp.adjustPrecisionTo(dateTimePart).addMonth(interval) + DateTimePart.DAY -> timestamp.adjustPrecisionTo(dateTimePart).addDay(interval) + DateTimePart.HOUR -> timestamp.adjustPrecisionTo(dateTimePart).addHour(interval) DateTimePart.MINUTE -> timestamp.adjustPrecisionTo(dateTimePart).addMinute(interval) DateTimePart.SECOND -> timestamp.adjustPrecisionTo(dateTimePart).addSecond(interval) - else -> errNoContext("invalid datetime part for date_add: ${dateTimePart.toString().toLowerCase()}", - errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, - internal = false) + else -> errNoContext( + "invalid datetime part for date_add: ${dateTimePart.toString().toLowerCase()}", + errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, + internal = false + ) } return valueFactory.newTimestamp(addedTimestamp) @@ -113,5 +125,3 @@ internal class DateAddExprFunction(val valueFactory: ExprValueFactory) : ExprFun } } } - - diff --git a/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt index b1620029fd..1f46c74c7e 100644 --- a/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt @@ -58,14 +58,16 @@ internal class DateDiffExprFunction(val valueFactory: ExprValueFactory) : ExprFu // // If we introduce the `milliseconds` datetime part this will need to be // revisited - private fun Timestamp.toJava() = OffsetDateTime.of(year, - month, - day, - hour, - minute, - second, - 0, - ZoneOffset.ofTotalSeconds((localOffset ?: 0) * 60)) + private fun Timestamp.toJava() = OffsetDateTime.of( + year, + month, + day, + hour, + minute, + second, + 0, + ZoneOffset.ofTotalSeconds((localOffset ?: 0) * 60) + ) private fun yearsSince(left: OffsetDateTime, right: OffsetDateTime): Number = Period.between(left.toLocalDate(), right.toLocalDate()).years @@ -94,17 +96,19 @@ internal class 
DateDiffExprFunction(val valueFactory: ExprValueFactory) : ExprFu val rightAsJava = right.toJava() val difference = when (dateTimePart) { - DateTimePart.YEAR -> yearsSince(leftAsJava, rightAsJava) - DateTimePart.MONTH -> monthsSince(leftAsJava, rightAsJava) - DateTimePart.DAY -> daysSince(leftAsJava, rightAsJava) - DateTimePart.HOUR -> hoursSince(leftAsJava, rightAsJava) + DateTimePart.YEAR -> yearsSince(leftAsJava, rightAsJava) + DateTimePart.MONTH -> monthsSince(leftAsJava, rightAsJava) + DateTimePart.DAY -> daysSince(leftAsJava, rightAsJava) + DateTimePart.HOUR -> hoursSince(leftAsJava, rightAsJava) DateTimePart.MINUTE -> minutesSince(leftAsJava, rightAsJava) DateTimePart.SECOND -> secondsSince(leftAsJava, rightAsJava) - else -> errNoContext("invalid datetime part for date_diff: ${dateTimePart.toString().toLowerCase()}", - errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, - internal = false) + else -> errNoContext( + "invalid datetime part for date_diff: ${dateTimePart.toString().toLowerCase()}", + errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_DATE_PART, + internal = false + ) } return valueFactory.newInt(difference.toLong()) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt index aef22990d4..65316120be 100644 --- a/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt @@ -51,9 +51,16 @@ private const val SECONDS_PER_MINUTE = 60 internal class ExtractExprFunction(val valueFactory: ExprValueFactory) : ExprFunction { override val signature = FunctionSignature( name = "extract", - requiredParameters = listOf(StaticType.SYMBOL, AnyOfType(setOf(StaticType.TIMESTAMP, - StaticType.TIME, - StaticType.DATE))), + requiredParameters = listOf( + StaticType.SYMBOL, + AnyOfType( + setOf( + StaticType.TIMESTAMP, + StaticType.TIME, + StaticType.DATE + ) + ) + ), returnType = StaticType.DECIMAL ) @@ -65,11 +72,11 @@ internal class ExtractExprFunction(val valueFactory: ExprValueFactory) : ExprFun override fun callWithRequired(env: Environment, required: List): ExprValue { return when { required[1].isUnknown() -> valueFactory.nullValue - else -> eval(env, required) + else -> eval(env, required) } } - private fun Timestamp.extractedValue(dateTimePart: DateTimePart) : BigDecimal { + private fun Timestamp.extractedValue(dateTimePart: DateTimePart): BigDecimal { return when (dateTimePart) { DateTimePart.YEAR -> year DateTimePart.MONTH -> month @@ -82,7 +89,7 @@ internal class ExtractExprFunction(val valueFactory: ExprValueFactory) : ExprFun }.toBigDecimal() } - private fun LocalDate.extractedValue(dateTimePart: DateTimePart) : BigDecimal { + private fun LocalDate.extractedValue(dateTimePart: DateTimePart): BigDecimal { return when (dateTimePart) { DateTimePart.YEAR -> year DateTimePart.MONTH -> monthValue @@ -97,7 +104,7 @@ internal class ExtractExprFunction(val valueFactory: ExprValueFactory) : ExprFun }.toBigDecimal() } - private fun Time.extractedValue(dateTimePart: DateTimePart) : BigDecimal { + private fun Time.extractedValue(dateTimePart: DateTimePart): BigDecimal { return when (dateTimePart) { DateTimePart.HOUR -> localTime.hour.toBigDecimal() DateTimePart.MINUTE -> localTime.minute.toBigDecimal() @@ -122,7 +129,7 @@ internal class ExtractExprFunction(val valueFactory: ExprValueFactory) : ExprFun private fun eval(env: Environment, args: List): ExprValue { val dateTimePart = 
args[0].dateTimePartValue() - val extractedValue = when(args[1].type) { + val extractedValue = when (args[1].type) { ExprValueType.TIMESTAMP -> args[1].timestampValue().extractedValue(dateTimePart) ExprValueType.DATE -> args[1].dateValue().extractedValue(dateTimePart) ExprValueType.TIME -> args[1].timeValue().extractedValue(dateTimePart) diff --git a/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt index c5949f5b0b..fab88af132 100644 --- a/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt @@ -48,8 +48,7 @@ internal class MakeDateExprFunction(val valueFactory: ExprValueFactory) : ExprFu try { return valueFactory.newDate(year, month, day) - } - catch (e: DateTimeException) { + } catch (e: DateTimeException) { err( message = "Date field value out of range. $year-$month-$day", errorCode = ErrorCode.EVALUATOR_DATE_FIELD_OUT_OF_RANGE, @@ -58,4 +57,4 @@ internal class MakeDateExprFunction(val valueFactory: ExprValueFactory) : ExprFu ) } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt index 12886716f8..7ccb947ddc 100644 --- a/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt @@ -65,4 +65,4 @@ internal class MakeTimeExprFunction(val valueFactory: ExprValueFactory) : ExprFu ) } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt index 97121195a3..a673ec38fc 100644 --- a/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt @@ -42,4 +42,4 @@ internal class SizeExprFunction(val valueFactory: ExprValueFactory) : ExprFuncti return valueFactory.newInt(ionContainer.size) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt index c1d28e15bd..45a47349db 100644 --- a/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt @@ -94,7 +94,7 @@ import org.partiql.lang.types.StaticType * L1 = E1 - S1 * return java's substring(C, S1, E1) */ -internal class SubstringExprFunction(private val valueFactory: ExprValueFactory): ExprFunction { +internal class SubstringExprFunction(private val valueFactory: ExprValueFactory) : ExprFunction { override val signature = FunctionSignature( name = "substring", requiredParameters = listOf(StaticType.STRING, StaticType.INT), @@ -142,4 +142,4 @@ internal class SubstringExprFunction(private val valueFactory: ExprValueFactory) return valueFactory.newString(target.substring(byteIndexStart, byteIndexEnd)) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt b/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt index 2788ffb965..380f98d79d 100644 --- a/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt +++ b/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt @@ -57,17 +57,18 @@ internal class TimestampParser { * @throws EvaluationException if the offset seconds value was not a multiple of 60. */ private fun TemporalAccessor.getLocalOffset(): Int? 
= - if(!this.isSupported(ChronoField.OFFSET_SECONDS)) + if (!this.isSupported(ChronoField.OFFSET_SECONDS)) null else { val offsetSeconds = this.get(ChronoField.OFFSET_SECONDS) if (offsetSeconds % 60 != 0) { throw EvaluationException( "The parsed timestamp has a UTC offset that not a multiple of 1 minute. " + - "This timestamp cannot be parsed accurately because the maximum " + - "resolution for an Ion timestamp offset is 1 minute.", + "This timestamp cannot be parsed accurately because the maximum " + + "resolution for an Ion timestamp offset is 1 minute.", ErrorCode.EVALUATOR_PRECISION_LOSS_WHEN_PARSING_TIMESTAMP, - internal = false) + internal = false + ) } offsetSeconds / 60 } @@ -77,7 +78,7 @@ internal class TimestampParser { */ fun parseTimestamp(timestampString: String, formatPattern: String): Timestamp { val pattern = FormatPattern.fromString(formatPattern) - //TODO: do this during compilation + // TODO: do this during compilation pattern.validateForTimestampParsing() val accessor: TemporalAccessor by lazy { @@ -88,11 +89,12 @@ internal class TimestampParser { .toFormatter() .parse(timestampString) - //DateTimeFormatter.ofPattern(formatPattern).parse(timestampString) - } - catch (ex: IllegalArgumentException) { - throw EvaluationException(ex, ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN, - internal = false) + // DateTimeFormatter.ofPattern(formatPattern).parse(timestampString) + } catch (ex: IllegalArgumentException) { + throw EvaluationException( + ex, ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN, + internal = false + ) } } val year: Int by lazy { @@ -108,75 +110,92 @@ internal class TimestampParser { TimestampField.FRACTION_OF_SECOND -> { val nanoSeconds = BigDecimal.valueOf(accessor.getLong(ChronoField.NANO_OF_SECOND)) val secondsFraction = nanoSeconds.scaleByPowerOfTen(-9).stripTrailingZeros() - //Note that this overload of Timestamp.forSecond(...) creates a timestamp with "fraction" precision. - Timestamp.forSecond(year, - accessor.get(ChronoField.MONTH_OF_YEAR), - accessor.get(ChronoField.DAY_OF_MONTH), - accessor.get(ChronoField.HOUR_OF_DAY), - accessor.get(ChronoField.MINUTE_OF_HOUR), - BigDecimal.valueOf(accessor.getLong(ChronoField.SECOND_OF_MINUTE)).add( - secondsFraction) as BigDecimal, - accessor.getLocalOffset()) + // Note that this overload of Timestamp.forSecond(...) creates a timestamp with "fraction" precision. + Timestamp.forSecond( + year, + accessor.get(ChronoField.MONTH_OF_YEAR), + accessor.get(ChronoField.DAY_OF_MONTH), + accessor.get(ChronoField.HOUR_OF_DAY), + accessor.get(ChronoField.MINUTE_OF_HOUR), + BigDecimal.valueOf(accessor.getLong(ChronoField.SECOND_OF_MINUTE)).add( + secondsFraction + ) as BigDecimal, + accessor.getLocalOffset() + ) } - TimestampField.SECOND_OF_MINUTE -> { - //Note that this overload of Timestamp.forSecond(...) creates a timestamp with "second" precision. - Timestamp.forSecond(year, - accessor.get(ChronoField.MONTH_OF_YEAR), - accessor.get(ChronoField.DAY_OF_MONTH), - accessor.get(ChronoField.HOUR_OF_DAY), - accessor.get(ChronoField.MINUTE_OF_HOUR), - accessor.get(ChronoField.SECOND_OF_MINUTE), - accessor.getLocalOffset()) + TimestampField.SECOND_OF_MINUTE -> { + // Note that this overload of Timestamp.forSecond(...) creates a timestamp with "second" precision. 
+ Timestamp.forSecond( + year, + accessor.get(ChronoField.MONTH_OF_YEAR), + accessor.get(ChronoField.DAY_OF_MONTH), + accessor.get(ChronoField.HOUR_OF_DAY), + accessor.get(ChronoField.MINUTE_OF_HOUR), + accessor.get(ChronoField.SECOND_OF_MINUTE), + accessor.getLocalOffset() + ) } TimestampField.MINUTE_OF_HOUR -> { - Timestamp.forMinute(year, - accessor.get(ChronoField.MONTH_OF_YEAR), - accessor.get(ChronoField.DAY_OF_MONTH), - accessor.get(ChronoField.HOUR_OF_DAY), - accessor.get(ChronoField.MINUTE_OF_HOUR), - accessor.getLocalOffset()) + Timestamp.forMinute( + year, + accessor.get(ChronoField.MONTH_OF_YEAR), + accessor.get(ChronoField.DAY_OF_MONTH), + accessor.get(ChronoField.HOUR_OF_DAY), + accessor.get(ChronoField.MINUTE_OF_HOUR), + accessor.getLocalOffset() + ) } TimestampField.HOUR_OF_DAY -> { - Timestamp.forMinute(year, - accessor.get(ChronoField.MONTH_OF_YEAR), - accessor.get(ChronoField.DAY_OF_MONTH), - accessor.get(ChronoField.HOUR_OF_DAY), - 0, //Ion Timestamp has no HOUR precision -- default minutes to 0 - accessor.getLocalOffset()) + Timestamp.forMinute( + year, + accessor.get(ChronoField.MONTH_OF_YEAR), + accessor.get(ChronoField.DAY_OF_MONTH), + accessor.get(ChronoField.HOUR_OF_DAY), + 0, // Ion Timestamp has no HOUR precision -- default minutes to 0 + accessor.getLocalOffset() + ) } - TimestampField.DAY_OF_MONTH -> { - Timestamp.forDay(year, - accessor.get(ChronoField.MONTH_OF_YEAR), - accessor.get(ChronoField.DAY_OF_MONTH)) + TimestampField.DAY_OF_MONTH -> { + Timestamp.forDay( + year, + accessor.get(ChronoField.MONTH_OF_YEAR), + accessor.get(ChronoField.DAY_OF_MONTH) + ) } TimestampField.MONTH_OF_YEAR -> { Timestamp.forMonth(year, accessor.get(ChronoField.MONTH_OF_YEAR)) } - TimestampField.YEAR -> { + TimestampField.YEAR -> { Timestamp.forYear(year) } TimestampField.AM_PM, TimestampField.OFFSET, null -> { - errNoContext("This code should be unreachable because AM_PM or OFFSET or null" + - "should never the value of formatPattern.leastSignificantField by at this point", - errorCode = ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN, - internal = true) + errNoContext( + "This code should be unreachable because AM_PM or OFFSET or null" + + "should never the value of formatPattern.leastSignificantField by at this point", + errorCode = ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN, + internal = true + ) } } } - //Can be thrown by Timestamp.for*(...) methods. - catch(ex: IllegalArgumentException) { - throw EvaluationException(ex, + // Can be thrown by Timestamp.for*(...) methods. 
+ catch (ex: IllegalArgumentException) { + throw EvaluationException( + ex, ErrorCode.EVALUATOR_CUSTOM_TIMESTAMP_PARSE_FAILURE, propertyValueMapOf(Property.TIMESTAMP_FORMAT_PATTERN to formatPattern), - internal = false) + internal = false + ) } - //Can be thrown by TemporalAccessor.get(ChronoField) - catch(ex: DateTimeException) { - throw EvaluationException(ex, - ErrorCode.EVALUATOR_CUSTOM_TIMESTAMP_PARSE_FAILURE, - propertyValueMapOf(Property.TIMESTAMP_FORMAT_PATTERN to formatPattern), - internal = false) + // Can be thrown by TemporalAccessor.get(ChronoField) + catch (ex: DateTimeException) { + throw EvaluationException( + ex, + ErrorCode.EVALUATOR_CUSTOM_TIMESTAMP_PARSE_FAILURE, + propertyValueMapOf(Property.TIMESTAMP_FORMAT_PATTERN to formatPattern), + internal = false + ) } } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt b/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt index 1e96ffc2d5..d70d604ac7 100644 --- a/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt +++ b/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt @@ -39,13 +39,13 @@ internal class TimestampTemporalAccessor(val ts: Timestamp) : TemporalAccessor { * doesn't invoke this method to check if a ChronoField is supported. */ override fun isSupported(field: TemporalField?): Boolean = - when (field) { - IsoFields.QUARTER_OF_YEAR -> true - else -> false - } + when (field) { + IsoFields.QUARTER_OF_YEAR -> true + else -> false + } override fun getLong(field: TemporalField?): Long { - if(field == null) { + if (field == null) { throw IllegalArgumentException("argument 'field' may not be null") } return when (field) { @@ -61,11 +61,12 @@ internal class TimestampTemporalAccessor(val ts: Timestamp) : TemporalAccessor { ChronoField.AMPM_OF_DAY -> ts.hour / 12L ChronoField.CLOCK_HOUR_OF_AMPM -> { val hourOfAmPm = ts.hour.toLong() % 12L - if(hourOfAmPm == 0L) 12 else hourOfAmPm + if (hourOfAmPm == 0L) 12 else hourOfAmPm } - ChronoField.OFFSET_SECONDS -> if(ts.localOffset == null) 0 else ts.localOffset * 60L + ChronoField.OFFSET_SECONDS -> if (ts.localOffset == null) 0 else ts.localOffset * 60L else -> throw UnsupportedTemporalTypeException( - field.javaClass.name + "." + field.toString() + " not supported") + field.javaClass.name + "." 
+ field.toString() + " not supported" + ) } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt index d36059e7f8..23e637ca9b 100644 --- a/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt @@ -43,8 +43,7 @@ class ToStringExprFunction(private val valueFactory: ExprValueFactory) : ExprFun val formatter: DateTimeFormatter = try { DateTimeFormatter.ofPattern(pattern) - } - catch (ex: IllegalArgumentException) { + } catch (ex: IllegalArgumentException) { errInvalidFormatPattern(pattern, ex) } @@ -52,8 +51,7 @@ class ToStringExprFunction(private val valueFactory: ExprValueFactory) : ExprFun val temporalAccessor = TimestampTemporalAccessor(timestamp) try { return valueFactory.newString(formatter.format(temporalAccessor)) - } - catch (ex: UnsupportedTemporalTypeException) { + } catch (ex: UnsupportedTemporalTypeException) { errInvalidFormatPattern(pattern, ex) } catch (ex: DateTimeException) { errInvalidFormatPattern(pattern, ex) @@ -63,10 +61,12 @@ class ToStringExprFunction(private val valueFactory: ExprValueFactory) : ExprFun private fun errInvalidFormatPattern(pattern: String, cause: Exception): Nothing { val pvmap = PropertyValueMap() pvmap[Property.TIMESTAMP_FORMAT_PATTERN] = pattern - throw EvaluationException("Invalid DateTime format pattern", - ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN, - pvmap, - cause, - internal = false) + throw EvaluationException( + "Invalid DateTime format pattern", + ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN, + pvmap, + cause, + internal = false + ) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt index 92b9686a8e..9535d4f898 100644 --- a/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt @@ -40,12 +40,14 @@ class ToTimestampExprFunction(private val valueFactory: ExprValueFactory) : Expr override fun callWithRequired(env: Environment, required: List): ExprValue { val ts = try { Timestamp.valueOf(required[0].ionValue.stringValue()) - } catch(ex: IllegalArgumentException) { - throw EvaluationException("Timestamp was not a valid ion timestamp", + } catch (ex: IllegalArgumentException) { + throw EvaluationException( + "Timestamp was not a valid ion timestamp", ErrorCode.EVALUATOR_ION_TIMESTAMP_PARSE_FAILURE, PropertyValueMap(), ex, - true) + true + ) } return valueFactory.newTimestamp(ts) } @@ -54,4 +56,4 @@ class ToTimestampExprFunction(private val valueFactory: ExprValueFactory) : Expr val ts = TimestampParser.parseTimestamp(required[0].ionValue.stringValue()!!, opt.ionValue.stringValue()!!) 
return valueFactory.newTimestamp(ts) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt index dd954cab89..dad44e15f3 100644 --- a/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt @@ -105,51 +105,54 @@ internal class TrimExprFunction(private val valueFactory: ExprValueFactory) : Ex private fun ExprValue.codePoints() = this.stringValue().codePoints().toArray() - private fun trim(type: TrimSpecification, toRemove: IntArray, sourceString: IntArray) : ExprValue { + private fun trim(type: TrimSpecification, toRemove: IntArray, sourceString: IntArray): ExprValue { return when (type) { BOTH, NONE -> valueFactory.newString(sourceString.trim(toRemove)) - LEADING -> valueFactory.newString(sourceString.leadingTrim(toRemove)) - TRAILING -> valueFactory.newString(sourceString.trailingTrim(toRemove)) + LEADING -> valueFactory.newString(sourceString.leadingTrim(toRemove)) + TRAILING -> valueFactory.newString(sourceString.trailingTrim(toRemove)) } } private fun trim1Arg(sourceString: ExprValue) = trim(DEFAULT_SPECIFICATION, DEFAULT_TO_REMOVE, sourceString.codePoints()) - private fun trim2Arg(specificationOrToRemove: ExprValue, sourceString: ExprValue) : ExprValue { - if(!specificationOrToRemove.type.isText){ - errNoContext("with two arguments trim's first argument must be either the " + + private fun trim2Arg(specificationOrToRemove: ExprValue, sourceString: ExprValue): ExprValue { + if (!specificationOrToRemove.type.isText) { + errNoContext( + "with two arguments trim's first argument must be either the " + "specification or a 'to remove' string", errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_TRIM, - internal = false) + internal = false + ) } val trimSpec = TrimSpecification.from(specificationOrToRemove) - val toRemove = when(trimSpec) { + val toRemove = when (trimSpec) { NONE -> specificationOrToRemove.codePoints() else -> DEFAULT_TO_REMOVE } return trim(trimSpec, toRemove, sourceString.codePoints()) } - private fun trim3Arg(specification: ExprValue, toRemove: ExprValue, sourceString: ExprValue) : ExprValue { + private fun trim3Arg(specification: ExprValue, toRemove: ExprValue, sourceString: ExprValue): ExprValue { val trimSpec = TrimSpecification.from(specification) - if(trimSpec == NONE) { - errNoContext("'${specification.stringValue()}' is an unknown trim specification, " + + if (trimSpec == NONE) { + errNoContext( + "'${specification.stringValue()}' is an unknown trim specification, " + "valid vales: ${TrimSpecification.validValues}", errorCode = ErrorCode.EVALUATOR_INVALID_ARGUMENTS_FOR_TRIM, - internal = false) + internal = false + ) } - return trim(trimSpec, toRemove.codePoints(), sourceString.codePoints()) } override fun callWithRequired(env: Environment, required: List) = trim1Arg(required[0]) override fun callWithVariadic(env: Environment, required: List, variadic: List): ExprValue { return when (variadic.size) { - 0 -> trim1Arg(required[0]) - 1 -> trim2Arg(required[0], variadic[0]) - 2 -> trim3Arg(required[0], variadic[0], variadic[1]) + 0 -> trim1Arg(required[0]) + 1 -> trim2Arg(required[0], variadic[0]) + 2 -> trim3Arg(required[0], variadic[0], variadic[1]) else -> errNoContext("invalid trim arguments, should be unreachable", errorCode = ErrorCode.INTERNAL_ERROR, internal = true) } } @@ -160,10 +163,10 @@ private enum class TrimSpecification { companion object { fun from(arg: ExprValue) = 
when (arg.stringValue()) { - "both" -> BOTH - "leading" -> LEADING + "both" -> BOTH + "leading" -> LEADING "trailing" -> TRAILING - else -> NONE + else -> NONE } val validValues = TrimSpecification.values() diff --git a/lang/src/org/partiql/lang/eval/builtins/UnixTimestampFunction.kt b/lang/src/org/partiql/lang/eval/builtins/UnixTimestampFunction.kt index 7ee64518db..70b1c72221 100644 --- a/lang/src/org/partiql/lang/eval/builtins/UnixTimestampFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/UnixTimestampFunction.kt @@ -36,7 +36,7 @@ internal class UnixTimestampFunction(val valueFactory: ExprValueFactory) : ExprF ) private val millisPerSecond = BigDecimal(1000) - private fun epoch(timestamp: Timestamp) : BigDecimal = timestamp.decimalMillis.divide(millisPerSecond) + private fun epoch(timestamp: Timestamp): BigDecimal = timestamp.decimalMillis.divide(millisPerSecond) override fun callWithRequired(env: Environment, required: List): ExprValue { return valueFactory.newInt(epoch(env.session.now).toLong()) diff --git a/lang/src/org/partiql/lang/eval/builtins/storedprocedure/StoredProcedure.kt b/lang/src/org/partiql/lang/eval/builtins/storedprocedure/StoredProcedure.kt index 8e2b17dc23..6d3e79000d 100644 --- a/lang/src/org/partiql/lang/eval/builtins/storedprocedure/StoredProcedure.kt +++ b/lang/src/org/partiql/lang/eval/builtins/storedprocedure/StoredProcedure.kt @@ -37,4 +37,4 @@ interface StoredProcedure { * @param args argument list supplied to the stored procedure */ fun call(session: EvaluationSession, args: List): ExprValue -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt index be3df8c2fd..10fe95fd37 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt @@ -93,7 +93,7 @@ internal enum class MonthFormat { /** * Format symbol: `MMMMM`. */ - FIRST_LETTER_OF_MONTH_NAME, + FIRST_LETTER_OF_MONTH_NAME, } /** @@ -195,7 +195,6 @@ internal class NanoOfSecondPatternSymbol : PatternSymbol() { override fun hashCode(): Int = field.hashCode() } - /** * Represents the AM-PM "pseudo" timestamp field: `a`. 
*/ @@ -261,4 +260,4 @@ internal enum class OffsetFieldFormat { */ internal data class OffsetPatternSymbol(val format: OffsetFieldFormat) : PatternSymbol() { override val field: TimestampField = TimestampField.OFFSET -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt index 394d747091..40702e198c 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt @@ -90,9 +90,9 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L val duplicatedField = formatSymbols.groupingBy { it.field } .eachCount() - .filter { it.value > 1 } //Appears more than once in field + .filter { it.value > 1 } // Appears more than once in field .asSequence() - .sortedByDescending { it.value } //Sort descending by number of appearances + .sortedByDescending { it.value } // Sort descending by number of appearances .firstOrNull() if (duplicatedField != null) { @@ -117,7 +117,7 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L val hasAmPm = formatSymbols.filterIsInstance().any() when (it.format.clock) { - HourClock.TwelveHour -> { + HourClock.TwelveHour -> { if (!hasAmPm) { throw EvaluationException( message = "timestamp format pattern contains 12-hour hour of day field but doesn't " + "contain an am/pm field.", @@ -128,7 +128,7 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L } } HourClock.TwentyFourHour -> { - if(hasAmPm) { + if (hasAmPm) { throw EvaluationException( message = "timestamp format pattern contains 24-hour hour of day field and also " + "contains an am/pm field.", errorCode = ErrorCode.EVALUATOR_TIMESTAMP_FORMAT_PATTERN_HOUR_CLOCK_AM_PM_MISMATCH, @@ -160,22 +160,24 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L fun errIfMissingTimestampFields(vararg fields: TimestampField) { val missingFields = fields.filter { requiredField -> formatSymbols.all { it.field != requiredField } } - if(missingFields.any()) { + if (missingFields.any()) { err(missingFields.asSequence().joinToString(", ")) } } - //Minimum precision for patterns containing offset or am/pm symbols is HOUR. - //NOTE: HOUR is not a valid precision for an Ion timestamp but when a format pattern's - //leastSignificantField is HOUR, the minute field defaults to 00. - if(hasOffset || hasAmPm) { - errIfMissingTimestampFields(TimestampField.YEAR, TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, - TimestampField.HOUR_OF_DAY) + // Minimum precision for patterns containing offset or am/pm symbols is HOUR. + // NOTE: HOUR is not a valid precision for an Ion timestamp but when a format pattern's + // leastSignificantField is HOUR, the minute field defaults to 00. + if (hasOffset || hasAmPm) { + errIfMissingTimestampFields( + TimestampField.YEAR, TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, + TimestampField.HOUR_OF_DAY + ) } when (leastSignificantField) { null -> { - //If most precise field is null there are no format symbols corresponding to any timestamp fields. + // If most precise field is null there are no format symbols corresponding to any timestamp fields. 
err("YEAR") } TimestampField.YEAR -> { @@ -184,18 +186,26 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L } TimestampField.MONTH_OF_YEAR -> errIfMissingTimestampFields(TimestampField.YEAR) TimestampField.DAY_OF_MONTH -> errIfMissingTimestampFields(TimestampField.YEAR, TimestampField.MONTH_OF_YEAR) - TimestampField.HOUR_OF_DAY -> errIfMissingTimestampFields(TimestampField.YEAR, - TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH) - TimestampField.MINUTE_OF_HOUR -> errIfMissingTimestampFields(TimestampField.YEAR, - TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY) - TimestampField.SECOND_OF_MINUTE -> errIfMissingTimestampFields(TimestampField.YEAR, - TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY, - TimestampField.MINUTE_OF_HOUR) - TimestampField. FRACTION_OF_SECOND -> errIfMissingTimestampFields(TimestampField.YEAR, - TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY, - TimestampField.MINUTE_OF_HOUR, TimestampField.SECOND_OF_MINUTE) - - TimestampField.OFFSET, TimestampField.AM_PM -> { + TimestampField.HOUR_OF_DAY -> errIfMissingTimestampFields( + TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH + ) + TimestampField.MINUTE_OF_HOUR -> errIfMissingTimestampFields( + TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY + ) + TimestampField.SECOND_OF_MINUTE -> errIfMissingTimestampFields( + TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY, + TimestampField.MINUTE_OF_HOUR + ) + TimestampField.FRACTION_OF_SECOND -> errIfMissingTimestampFields( + TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY, + TimestampField.MINUTE_OF_HOUR, TimestampField.SECOND_OF_MINUTE + ) + + TimestampField.OFFSET, TimestampField.AM_PM -> { throw IllegalStateException("OFFSET, AM_PM should never be the least significant field!") } } @@ -205,7 +215,7 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L * but not for parsing. */ private fun checkForFieldsNotValidForParsing() { - if(formatSymbols.filterIsInstance().any { it.format == MonthFormat.FIRST_LETTER_OF_MONTH_NAME }) { + if (formatSymbols.filterIsInstance().any { it.format == MonthFormat.FIRST_LETTER_OF_MONTH_NAME }) { throw EvaluationException( message = "timestamp format pattern missing fields", errorCode = ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN_SYMBOL_FOR_PARSING, @@ -214,4 +224,4 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L ) } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt index 56889a5254..d03e6fae48 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt @@ -79,15 +79,17 @@ private interface State { /** * Table backed [State]. This class is mutable through [transitionTo] so needs to be setup statically to be thread safe */ -private class TableState(override val tokenType: TokenType?, - override val stateType: StateType, - val delegate: State? 
= null) : State { +private class TableState( + override val tokenType: TokenType?, + override val stateType: StateType, + val delegate: State? = null +) : State { private val transitionTable = object { val backingArray = Array(TABLE_SIZE) { null } operator fun get(codePoint: Int): State? = when { codePoint < TABLE_SIZE -> backingArray[codePoint] - else -> null + else -> null } operator fun set(codePoint: Int, next: State) { @@ -147,40 +149,42 @@ internal class TimestampFormatPatternLexer { val endQuotedState = object : TextState(StateType.TERMINAL) { override fun nextFor(cp: Int): State = when (cp) { SINGLE_QUOTE_CP -> startQuotedText - else -> INITIAL_STATE.nextFor(cp) + else -> INITIAL_STATE.nextFor(cp) } } val inQuotedState = object : TextState(StateType.INCOMPLETE) { override fun nextFor(cp: Int): State = when (cp) { SINGLE_QUOTE_CP -> endQuotedState - else -> this + else -> this } } override fun nextFor(cp: Int): State = when (cp) { SINGLE_QUOTE_CP -> endQuotedState - else -> inQuotedState + else -> inQuotedState } } INITIAL_STATE.transitionTo(NON_ESCAPED_TEXT, startEscapedText) INITIAL_STATE.transitionTo(SINGLE_QUOTE_CP, startQuotedText) PATTERN.codePoints().forEach { cp -> - INITIAL_STATE.transitionTo(cp, object : PatternState(cp, StateType.START_AND_TERMINAL) { - val repeatingState = object : PatternState(cp, StateType.TERMINAL) { - override fun nextFor(cp: Int): State = when (cp) { - codePoint -> this - else -> INITIAL_STATE.nextFor(cp) + INITIAL_STATE.transitionTo( + cp, + object : PatternState(cp, StateType.START_AND_TERMINAL) { + val repeatingState = object : PatternState(cp, StateType.TERMINAL) { + override fun nextFor(cp: Int): State = when (cp) { + codePoint -> this + else -> INITIAL_STATE.nextFor(cp) + } } - } - override fun nextFor(cp: Int): State = when (cp) { - codePoint -> repeatingState - else -> INITIAL_STATE.nextFor(cp) + codePoint -> repeatingState + else -> INITIAL_STATE.nextFor(cp) + } } - }) + ) } } } @@ -188,17 +192,17 @@ internal class TimestampFormatPatternLexer { private fun StringBuilder.reset() = this.setLength(0) private fun tokenEnd(current: State, next: State) = when { - current.stateType == StateType.INITIAL -> false + current.stateType == StateType.INITIAL -> false current.tokenType == next.tokenType && next.stateType.beginsToken -> true - current.tokenType != next.tokenType -> true - else -> false + current.tokenType != next.tokenType -> true + else -> false } fun tokenize(source: String): List { val tokens = mutableListOf() val buffer = StringBuilder() - if(source.isEmpty()) { + if (source.isEmpty()) { return listOf() } diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt index dfbf19625b..6c9f256f06 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt @@ -19,7 +19,6 @@ import org.partiql.lang.errors.Property import org.partiql.lang.eval.EvaluationException import org.partiql.lang.util.propertyValueMapOf - internal class TimestampFormatPatternParser { fun parse(formatPatternString: String): FormatPattern { @@ -30,7 +29,7 @@ internal class TimestampFormatPatternParser { val formatItems = tokens.map { token -> when (token.tokenType) { TokenType.TEXT -> TextItem(token.value) - TokenType.PATTERN -> { + TokenType.PATTERN -> { patternCounter += 1 parsePattern(token.value) } @@ -44,44 +43,44 @@ internal 
class TimestampFormatPatternParser { // Possible optimization here: create singleton instances corresponding to each of the branches and return // those instead of creating new instances. This could work because all of the objects here are immutable. // This reduces the amount of garbage created during execution of this method. - "y" -> YearPatternSymbol(YearFormat.FOUR_DIGIT) - "yy" -> YearPatternSymbol(YearFormat.TWO_DIGIT) - "yyy", "yyyy" -> YearPatternSymbol(YearFormat.FOUR_DIGIT_ZERO_PADDED) + "y" -> YearPatternSymbol(YearFormat.FOUR_DIGIT) + "yy" -> YearPatternSymbol(YearFormat.TWO_DIGIT) + "yyy", "yyyy" -> YearPatternSymbol(YearFormat.FOUR_DIGIT_ZERO_PADDED) - "M" -> MonthPatternSymbol(MonthFormat.MONTH_NUMBER) - "MM" -> MonthPatternSymbol(MonthFormat.MONTH_NUMBER_ZERO_PADDED) - "MMM" -> MonthPatternSymbol(MonthFormat.ABBREVIATED_MONTH_NAME) - "MMMM" -> MonthPatternSymbol(MonthFormat.FULL_MONTH_NAME) - "MMMMM" -> MonthPatternSymbol(MonthFormat.FIRST_LETTER_OF_MONTH_NAME) + "M" -> MonthPatternSymbol(MonthFormat.MONTH_NUMBER) + "MM" -> MonthPatternSymbol(MonthFormat.MONTH_NUMBER_ZERO_PADDED) + "MMM" -> MonthPatternSymbol(MonthFormat.ABBREVIATED_MONTH_NAME) + "MMMM" -> MonthPatternSymbol(MonthFormat.FULL_MONTH_NAME) + "MMMMM" -> MonthPatternSymbol(MonthFormat.FIRST_LETTER_OF_MONTH_NAME) - "d" -> DayOfMonthPatternSymbol(TimestampFieldFormat.NUMBER) - "dd" -> DayOfMonthPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER) + "d" -> DayOfMonthPatternSymbol(TimestampFieldFormat.NUMBER) + "dd" -> DayOfMonthPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER) - "H" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.NUMBER_24_HOUR) - "HH" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.ZERO_PADDED_NUMBER_24_HOUR) - "h" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.NUMBER_12_HOUR) - "hh" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.ZERO_PADDED_NUMBER_12_HOUR) + "H" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.NUMBER_24_HOUR) + "HH" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.ZERO_PADDED_NUMBER_24_HOUR) + "h" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.NUMBER_12_HOUR) + "hh" -> HourOfDayPatternSymbol(HourOfDayFormatFieldFormat.ZERO_PADDED_NUMBER_12_HOUR) - "a" -> AmPmPatternSymbol() + "a" -> AmPmPatternSymbol() - "m" -> MinuteOfHourPatternSymbol(TimestampFieldFormat.NUMBER) - "mm" -> MinuteOfHourPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER) + "m" -> MinuteOfHourPatternSymbol(TimestampFieldFormat.NUMBER) + "mm" -> MinuteOfHourPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER) - "s" -> SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.NUMBER) - "ss" -> SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER) + "s" -> SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.NUMBER) + "ss" -> SecondOfMinutePatternPatternSymbol(TimestampFieldFormat.ZERO_PADDED_NUMBER) - "n" -> NanoOfSecondPatternSymbol() + "n" -> NanoOfSecondPatternSymbol() - "X" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_OR_Z) - "XX", "XXXX" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_MINUTE_OR_Z) + "X" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_OR_Z) + "XX", "XXXX" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_MINUTE_OR_Z) "XXX", "XXXXX" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_COLON_MINUTE_OR_Z) - "x" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR) - "xx", "xxxx" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_MINUTE) + "x" -> 
OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR) + "xx", "xxxx" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_MINUTE) "xxx", "xxxxx" -> OffsetPatternSymbol(OffsetFieldFormat.ZERO_PADDED_HOUR_COLON_MINUTE) - else -> - //Note: the lexer *should* only return tokens that are full of capital S's so the precision is the length. + else -> + // Note: the lexer *should* only return tokens that are full of capital S's so the precision is the length. if (raw.first() == 'S') FractionOfSecondPatternSymbol(raw.length) else diff --git a/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt b/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt index d66bf8cb47..5ca4da713d 100644 --- a/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt +++ b/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt @@ -50,16 +50,16 @@ object DelimitedValues { /** Attempt to parse each value as a scalar, and fall back to string. */ AUTO { override fun convert(valueFactory: ExprValueFactory, raw: String): ExprValue = try { - val converted = valueFactory.ion.singleValue(raw) - when (converted) { - is IonInt, is IonFloat, is IonDecimal, is IonTimestamp -> - valueFactory.newFromIonValue(converted) - // if we can't convert the above, we just use the input string as-is - else -> valueFactory.newString(raw) - } - } catch (e: IonException) { - valueFactory.newString(raw) + val converted = valueFactory.ion.singleValue(raw) + when (converted) { + is IonInt, is IonFloat, is IonDecimal, is IonTimestamp -> + valueFactory.newFromIonValue(converted) + // if we can't convert the above, we just use the input string as-is + else -> valueFactory.newString(raw) } + } catch (e: IonException) { + valueFactory.newString(raw) + } }, /** Each field is a string. */ NONE { @@ -78,10 +78,12 @@ object DelimitedValues { * @param conversionMode How column text should be converted. */ @JvmStatic - fun exprValue(valueFactory: ExprValueFactory, - input: Reader, - csvFormat: CSVFormat, - conversionMode: ConversionMode): ExprValue { + fun exprValue( + valueFactory: ExprValueFactory, + input: Reader, + csvFormat: CSVFormat, + conversionMode: ConversionMode + ): ExprValue { val reader = BufferedReader(input) val csvParser = CSVParser(reader, csvFormat) val columns: List = csvParser.headerNames @@ -128,12 +130,14 @@ object DelimitedValues { * @param writeHeader Whether or not to write the header. */ @JvmStatic - fun writeTo(ion: IonSystem, - output: Writer, - value: ExprValue, - delimiter: Char, - newline: String, - writeHeader: Boolean) { + fun writeTo( + ion: IonSystem, + output: Writer, + value: ExprValue, + delimiter: Char, + newline: String, + writeHeader: Boolean + ) { CSVPrinter(output, CSVFormat.DEFAULT.withDelimiter(delimiter).withRecordSeparator(newline)).use { csvPrinter -> var names: List? 
= null for (row in value) { diff --git a/lang/src/org/partiql/lang/eval/like/CheckpointIterator.kt b/lang/src/org/partiql/lang/eval/like/CheckpointIterator.kt index c570734dc7..a2eeaa1491 100644 --- a/lang/src/org/partiql/lang/eval/like/CheckpointIterator.kt +++ b/lang/src/org/partiql/lang/eval/like/CheckpointIterator.kt @@ -27,5 +27,3 @@ internal interface CheckpointIterator : Iterator { */ fun discardCheckpoint() } - - diff --git a/lang/src/org/partiql/lang/eval/like/CheckpointIteratorImpl.kt b/lang/src/org/partiql/lang/eval/like/CheckpointIteratorImpl.kt index a432b09fad..75fdc8c5c2 100644 --- a/lang/src/org/partiql/lang/eval/like/CheckpointIteratorImpl.kt +++ b/lang/src/org/partiql/lang/eval/like/CheckpointIteratorImpl.kt @@ -2,7 +2,6 @@ package org.partiql.lang.eval.like import java.util.Stack - /** An implementation of [CheckpointIterator] which is backed by a [List]. */ internal class CheckpointIteratorImpl(private val backingList: List) : CheckpointIterator { private val checkpointStack = Stack() @@ -11,7 +10,7 @@ internal class CheckpointIteratorImpl(private val backingList: List) : Che override fun hasNext(): Boolean = (backingList.size - 1) > idx override fun next(): T { - if(!hasNext()) throw NoSuchElementException() + if (!hasNext()) throw NoSuchElementException() return backingList[++idx] } @@ -27,5 +26,3 @@ internal class CheckpointIteratorImpl(private val backingList: List) : Che checkpointStack.pop() } } - - diff --git a/lang/src/org/partiql/lang/eval/like/CodepointCheckpointIterator.kt b/lang/src/org/partiql/lang/eval/like/CodepointCheckpointIterator.kt index 055aa74f55..0bf2f710ed 100644 --- a/lang/src/org/partiql/lang/eval/like/CodepointCheckpointIterator.kt +++ b/lang/src/org/partiql/lang/eval/like/CodepointCheckpointIterator.kt @@ -11,7 +11,7 @@ internal class CodepointCheckpointIterator(private val str: String) : Checkpoint override fun hasNext(): Boolean = (codepointCount - 1) > idx override fun next(): Int { - if(!hasNext()) throw NoSuchElementException() + if (!hasNext()) throw NoSuchElementException() return str.codePointAt(++idx) } @@ -30,4 +30,4 @@ internal class CodepointCheckpointIterator(private val str: String) : Checkpoint override fun discardCheckpoint() { checkpointStack.pop() } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/like/PatternPart.kt b/lang/src/org/partiql/lang/eval/like/PatternPart.kt index b4c632ddd4..f61a844aa2 100644 --- a/lang/src/org/partiql/lang/eval/like/PatternPart.kt +++ b/lang/src/org/partiql/lang/eval/like/PatternPart.kt @@ -28,42 +28,43 @@ internal fun parsePattern(pattern: String, escapeChar: Int?): List val codepointList = pattern.codePoints().toList() val codepointsItr = codepointList.listIterator() val parts = ArrayList() - while(codepointsItr.hasNext()) { + while (codepointsItr.hasNext()) { val c = codepointsItr.next() - parts.add(when(c) { - ANY_ONE_CHAR -> PatternPart.AnyOneChar - ZERO_OR_MORE_OF_ANY_CHAR -> { - // consider consecutive `%` to be the same as one `%` - while(codepointsItr.hasNext() && codepointList[codepointsItr.nextIndex()] == ZERO_OR_MORE_OF_ANY_CHAR) { - codepointsItr.next() - } - - PatternPart.ZeroOrMoreOfAnyChar - } - else -> { - codepointsItr.previous() - // Build pattern for matching the exact string - val buffer = ArrayList() - // stop building if we encounter end of input - do { - val cc = codepointsItr.next() - // If [escapeChar] is encountered, just add the next codepoint to the buffer. 
- if(escapeChar != null && cc == escapeChar) { - buffer.add(codepointsItr.next()) - } else { - // stop building and back up one if we encounter `%` or `_` characters - if (cc == ANY_ONE_CHAR || cc == ZERO_OR_MORE_OF_ANY_CHAR) { - codepointsItr.previous() - break - } - buffer.add(cc) + parts.add( + when (c) { + ANY_ONE_CHAR -> PatternPart.AnyOneChar + ZERO_OR_MORE_OF_ANY_CHAR -> { + // consider consecutive `%` to be the same as one `%` + while (codepointsItr.hasNext() && codepointList[codepointsItr.nextIndex()] == ZERO_OR_MORE_OF_ANY_CHAR) { + codepointsItr.next() } - } while(codepointsItr.hasNext()) + PatternPart.ZeroOrMoreOfAnyChar + } + else -> { + codepointsItr.previous() + // Build pattern for matching the exact string + val buffer = ArrayList() + // stop building if we encounter end of input + do { + val cc = codepointsItr.next() + // If [escapeChar] is encountered, just add the next codepoint to the buffer. + if (escapeChar != null && cc == escapeChar) { + buffer.add(codepointsItr.next()) + } else { + // stop building and back up one if we encounter `%` or `_` characters + if (cc == ANY_ONE_CHAR || cc == ZERO_OR_MORE_OF_ANY_CHAR) { + codepointsItr.previous() + break + } + buffer.add(cc) + } + } while (codepointsItr.hasNext()) - PatternPart.ExactChars(buffer.toIntArray()) + PatternPart.ExactChars(buffer.toIntArray()) + } } - }) + ) } return parts @@ -71,12 +72,13 @@ internal fun parsePattern(pattern: String, escapeChar: Int?): List internal fun executePattern(parts: List, str: String): Boolean { return executePattern( - CheckpointIteratorImpl(parts), CodepointCheckpointIterator(str)) + CheckpointIteratorImpl(parts), CodepointCheckpointIterator(str) + ) } private fun executePattern(partsItr: CheckpointIterator, charsItr: CodepointCheckpointIterator): Boolean { while (partsItr.hasNext()) { - if(!executeOnePart(partsItr, charsItr)) + if (!executeOnePart(partsItr, charsItr)) return false } return !charsItr.hasNext() @@ -85,7 +87,7 @@ private fun executePattern(partsItr: CheckpointIterator, charsItr: private fun executeOnePart(partsItr: CheckpointIterator, charsItr: CodepointCheckpointIterator): Boolean { when (val currentPart = partsItr.next()) { is PatternPart.AnyOneChar -> { - if(!charsItr.hasNext()) { + if (!charsItr.hasNext()) { return false } @@ -103,7 +105,7 @@ private fun executeOnePart(partsItr: CheckpointIterator, charsItr: PatternPart.ZeroOrMoreOfAnyChar -> { // No need to check the rest of the string if this is the last pattern part if (!partsItr.hasNext()) { - charsItr.skipToEnd() // consume rest of string otherwise we will consider this a non-match. + charsItr.skipToEnd() // consume rest of string otherwise we will consider this a non-match. return true } @@ -136,4 +138,3 @@ private fun executeOnePart(partsItr: CheckpointIterator, charsItr: } } } - diff --git a/lang/src/org/partiql/lang/eval/time/Time.kt b/lang/src/org/partiql/lang/eval/time/Time.kt index 2d9df71c86..2735db198a 100644 --- a/lang/src/org/partiql/lang/eval/time/Time.kt +++ b/lang/src/org/partiql/lang/eval/time/Time.kt @@ -27,7 +27,6 @@ internal const val SECONDS_PER_HOUR = SECONDS_PER_MINUTE * MINUTES_PER_HOUR internal const val NANOS_PER_SECOND = 1000000000 internal const val MAX_PRECISION_FOR_TIME = 9 - /** * Wrapper class representing the run time instance of TIME in PartiQL. * - `TIME [(p)] HH:MM:ss[.ddd][+|-HH:MM]` PartiQL statement creates a run-time instance of this class with [zoneOffset] as null. 
@@ -53,7 +52,8 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int message = "Specified precision for TIME should be a non-negative integer between 0 and 9 inclusive", errorCode = ErrorCode.EVALUATOR_INVALID_PRECISION_FOR_TIME, errorContext = propertyValueMapOf(), - internal = false) + internal = false + ) } } @@ -80,9 +80,9 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int */ @JvmStatic @JvmOverloads - fun of(hour: Int, minute: Int, second: Int, nano: Int, precision: Int, tz_minutes: Int? = null) : Time { + fun of(hour: Int, minute: Int, second: Int, nano: Int, precision: Int, tz_minutes: Int? = null): Time { - //Validates the range of values for all the parameters. This part may throw a DateTimeException + // Validates the range of values for all the parameters. This part may throw a DateTimeException try { ChronoField.HOUR_OF_DAY.checkValidValue(hour.toLong()) ChronoField.MINUTE_OF_HOUR.checkValidValue(minute.toLong()) @@ -126,8 +126,9 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int */ @JvmStatic @JvmOverloads - fun of(localTime: LocalTime, precision: Int, zoneOffset: ZoneOffset? = null) : Time { - return Time.of(localTime.hour, localTime.minute, localTime.second, localTime.nano, precision, + fun of(localTime: LocalTime, precision: Int, zoneOffset: ZoneOffset? = null): Time { + return Time.of( + localTime.hour, localTime.minute, localTime.second, localTime.nano, precision, zoneOffset?.totalSeconds?.div(SECONDS_PER_MINUTE) ) } @@ -137,7 +138,7 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int * Returns the [OffsetTime] representation of this value if a [ZoneOffset] is defined for this, otherwise returns null. */ val offsetTime - get() : OffsetTime? = zoneOffset?.let { + get(): OffsetTime? = zoneOffset?.let { OffsetTime.of(localTime, it) } @@ -145,20 +146,20 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int * Returns the TIMEZONE_HOUR for the [zoneOffset] of this instance. */ val timezoneHour - get() : Int? = zoneOffset?.totalSeconds?.div(SECONDS_PER_HOUR) + get(): Int? = zoneOffset?.totalSeconds?.div(SECONDS_PER_HOUR) /** * Returns the TIMEZONE_HOUR for the [zoneOffset] of this instance. */ val timezoneMinute - get() : Int? = (zoneOffset?.totalSeconds?.div(SECONDS_PER_MINUTE))?.rem(SECONDS_PER_MINUTE) + get(): Int? = (zoneOffset?.totalSeconds?.div(SECONDS_PER_MINUTE))?.rem(SECONDS_PER_MINUTE) /** * Returns the seconds along with the fractional part of the second's value. */ - val secondsWithFractionalPart : BigDecimal - get() = (localTime.second.toBigDecimal() + localTime.nano.toBigDecimal().divide(NANOS_PER_SECOND.toBigDecimal())) - .setScale(precision, RoundingMode.HALF_EVEN) + val secondsWithFractionalPart: BigDecimal + get() = (localTime.second.toBigDecimal() + localTime.nano.toBigDecimal().divide(NANOS_PER_SECOND.toBigDecimal())) + .setScale(precision, RoundingMode.HALF_EVEN) fun toIonValue(ion: IonSystem): IonStruct = ion.newEmptyStruct().apply { @@ -171,11 +172,11 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int } /** - + Generates a formatter pattern at run time depending on the precision value. - * This pattern is subject to change based on the java's [DateTimeFormatter]. [java doc](https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html) - * Check here if there are issues with the output format pattern. 
- */ - private fun formatterPattern() : String { + + Generates a formatter pattern at run time depending on the precision value. + * This pattern is subject to change based on the java's [DateTimeFormatter]. [java doc](https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html) + * Check here if there are issues with the output format pattern. + */ + private fun formatterPattern(): String { return "HH:mm:ss" + if (precision > 0) "." + "S".repeat(min(9, precision)) else "" } @@ -208,4 +209,4 @@ data class Time private constructor(val localTime: LocalTime, val precision: Int else -> MORE } } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/time/TimeExtensions.kt b/lang/src/org/partiql/lang/eval/time/TimeExtensions.kt index 68a80afd78..7280b390c2 100644 --- a/lang/src/org/partiql/lang/eval/time/TimeExtensions.kt +++ b/lang/src/org/partiql/lang/eval/time/TimeExtensions.kt @@ -6,7 +6,6 @@ import org.partiql.lang.eval.time.SECONDS_PER_MINUTE import java.time.ZoneOffset import kotlin.math.absoluteValue - // These are used to validate the generic format of the time string. // The more involved logic such as validating the time is done by LocalTime.parse or OffsetTime.parse internal val timeWithoutTimeZoneRegex = Regex("\\d\\d:\\d\\d:\\d\\d(\\.\\d*)?") @@ -21,36 +20,37 @@ internal val DATE_PATTERN_REGEX = Regex("\\d\\d\\d\\d-\\d\\d-\\d\\d") * Returns the string representation of the [ZoneOffset] in HH:mm format. */ internal fun ZoneOffset.getOffsetHHmm(): String = - (if(totalSeconds >= 0) "+" else "-") + - hour.absoluteValue.toString().padStart(2, '0') + - ":" + - minute.absoluteValue.toString().padStart(2, '0') + (if (totalSeconds >= 0) "+" else "-") + + hour.absoluteValue.toString().padStart(2, '0') + + ":" + + minute.absoluteValue.toString().padStart(2, '0') /** * Get time zone offset hour */ -internal val ZoneOffset.hour : Int +internal val ZoneOffset.hour: Int get() = totalSeconds / SECONDS_PER_HOUR /** * Get time zone offset minute */ -internal val ZoneOffset.minute : Int +internal val ZoneOffset.minute: Int get() = (totalSeconds / SECONDS_PER_MINUTE) % SECONDS_PER_MINUTE /** * Get time zone offset in total minutes */ -internal val ZoneOffset.totalMinutes : Int +internal val ZoneOffset.totalMinutes: Int get() = totalSeconds / SECONDS_PER_MINUTE /** * Calculates the precision of a time string based on the fractional component of the 'HH:MM:SS[.ddd....][+|-HH:MM]' format. 
*/ -internal fun getPrecisionFromTimeString(timeString: String) : Int { +internal fun getPrecisionFromTimeString(timeString: String): Int { val matcher = genericTimeRegex.toPattern().matcher(timeString) if (!matcher.find()) { - org.partiql.lang.eval.err("Time string does not match the format 'HH:MM:SS[.ddd....][+|-HH:MM]'", + org.partiql.lang.eval.err( + "Time string does not match the format 'HH:MM:SS[.ddd....][+|-HH:MM]'", ErrorCode.PARSE_INVALID_TIME_STRING, propertyValueMapOf(), false diff --git a/lang/src/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransform.kt index 874a89cb8d..a9ba72491b 100644 --- a/lang/src/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransform.kt @@ -45,7 +45,8 @@ class AggregateSupportVisitorTransform : VisitorTransformBase() { setq = node.setq, funcName = node.funcName, arg = transformExpr(node.arg), - metas = transformMetas(node.metas) + metaContainerOf(AggregateRegisterIdMeta.TAG to AggregateRegisterIdMeta(aggregateCallSites.size))) + metas = transformMetas(node.metas) + metaContainerOf(AggregateRegisterIdMeta.TAG to AggregateRegisterIdMeta(aggregateCallSites.size)) + ) } aggregateCallSites.add(transformedCallAgg) return transformedCallAgg diff --git a/lang/src/org/partiql/lang/eval/visitors/CustomTypeVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/CustomTypeVisitorTransform.kt index a10de95a84..d62a28c1db 100644 --- a/lang/src/org/partiql/lang/eval/visitors/CustomTypeVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/CustomTypeVisitorTransform.kt @@ -9,7 +9,7 @@ import org.partiql.lang.domains.PartiqlAst * The tests for this visitor transform are covered in `SqlParserCustomTypeCatalogTests.kt` * TODO: Remove this VisitorTransform once https://github.com/partiql/partiql-lang-kotlin/issues/510 is resolved. */ -class CustomTypeVisitorTransform : VisitorTransformBase(){ +class CustomTypeVisitorTransform : VisitorTransformBase() { override fun transformTypeEsBoolean(node: PartiqlAst.Type.EsBoolean): PartiqlAst.Type = PartiqlAst.build { customType(name = "es_boolean", metas = transformTypeEsBoolean_metas(node)) } @@ -61,4 +61,4 @@ class CustomTypeVisitorTransform : VisitorTransformBase(){ override fun transformTypeSparkShort(node: PartiqlAst.Type.SparkShort): PartiqlAst.Type = PartiqlAst.build { customType("spark_short", metas = transformTypeSparkShort_metas(node)) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransform.kt index 84f953f30c..a27d7808ca 100644 --- a/lang/src/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransform.kt @@ -21,14 +21,14 @@ class FromSourceAliasVisitorTransform : VisitorTransformBase() { override fun transformFromSourceScan_asAlias(node: PartiqlAst.FromSource.Scan): SymbolPrimitive? { val thisFromSourceIndex = fromSourceCounter++ - return node.asAlias ?: - SymbolPrimitive(node.expr.extractColumnAlias(thisFromSourceIndex), node.extractSourceLocation()) + return node.asAlias + ?: SymbolPrimitive(node.expr.extractColumnAlias(thisFromSourceIndex), node.extractSourceLocation()) } override fun transformFromSourceUnpivot_asAlias(node: PartiqlAst.FromSource.Unpivot): SymbolPrimitive? 
{ val thisFromSourceIndex = fromSourceCounter++ - return node.asAlias ?: - SymbolPrimitive(node.expr.extractColumnAlias(thisFromSourceIndex), node.extractSourceLocation()) + return node.asAlias + ?: SymbolPrimitive(node.expr.extractColumnAlias(thisFromSourceIndex), node.extractSourceLocation()) } // Need use a different [fromSourceCounter] for sub-queries. diff --git a/lang/src/org/partiql/lang/eval/visitors/GroupByItemAliasVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/GroupByItemAliasVisitorTransform.kt index 44c7e95232..b1c4c4041e 100644 --- a/lang/src/org/partiql/lang/eval/visitors/GroupByItemAliasVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/GroupByItemAliasVisitorTransform.kt @@ -21,7 +21,6 @@ import org.partiql.lang.domains.PartiqlAst import org.partiql.lang.eval.extractColumnAlias import org.partiql.pig.runtime.SymbolPrimitive - /** * Pre-calculates [PartiqlAst.GroupBy] aliases, while not changing any that were previously specified, for example: * @@ -39,20 +38,25 @@ class GroupByItemAliasVisitorTransform(var nestLevel: Int = 0) : VisitorTransfor return PartiqlAst.build { groupBy_( strategy = node.strategy, - keyList = PartiqlAst.GroupKeyList(node.keyList.keys.mapIndexed { index, it -> - val aliasText = it.asAlias?.text ?: it.expr.extractColumnAlias(index) - var metas = it.expr.metas + metaContainerOf( - UniqueNameMeta.TAG to UniqueNameMeta("\$__partiql__group_by_${nestLevel}_item_$index")) + keyList = PartiqlAst.GroupKeyList( + node.keyList.keys.mapIndexed { index, it -> + val aliasText = it.asAlias?.text ?: it.expr.extractColumnAlias(index) + var metas = it.expr.metas + metaContainerOf( + UniqueNameMeta.TAG to UniqueNameMeta("\$__partiql__group_by_${nestLevel}_item_$index") + ) - if (it.asAlias == null) { - metas = metas + metaContainerOf(IsSyntheticNameMeta.TAG to IsSyntheticNameMeta.instance) - } - val alias = SymbolPrimitive(aliasText, metas) + if (it.asAlias == null) { + metas = metas + metaContainerOf(IsSyntheticNameMeta.TAG to IsSyntheticNameMeta.instance) + } + val alias = SymbolPrimitive(aliasText, metas) - groupKey_(transformExpr(it.expr), alias, alias.metas) - }, node.keyList.metas), + groupKey_(transformExpr(it.expr), alias, alias.metas) + }, + node.keyList.metas + ), groupAsAlias = node.groupAsAlias?.let { transformSymbolPrimitive(it) }, - metas = node.metas) + metas = node.metas + ) } } diff --git a/lang/src/org/partiql/lang/eval/visitors/GroupByPathExpressionVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/GroupByPathExpressionVisitorTransform.kt index 6de2a3af6d..1687bdefab 100644 --- a/lang/src/org/partiql/lang/eval/visitors/GroupByPathExpressionVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/GroupByPathExpressionVisitorTransform.kt @@ -25,8 +25,9 @@ import org.partiql.lang.eval.errNoContext * This transform must execute after [GroupByItemAliasVisitorTransform] and [FromSourceAliasVisitorTransform]. */ class GroupByPathExpressionVisitorTransform( - parentSubstitutions: Map = mapOf()) - : SubstitutionVisitorTransform(parentSubstitutions) { + parentSubstitutions: Map = mapOf() +) : + SubstitutionVisitorTransform(parentSubstitutions) { companion object { /** @@ -40,19 +41,18 @@ class GroupByPathExpressionVisitorTransform( val expr = groupKey.expr val asName = groupKey.asAlias - //(This is the reason this transform needs to execute after [GroupByItemAliasVisitorTransform].) + // (This is the reason this transform needs to execute after [GroupByItemAliasVisitorTransform].) 
return when { - asName == null -> throw IllegalStateException("GroupByItem.asName must be specified for this transform to work") + asName == null -> throw IllegalStateException("GroupByItem.asName must be specified for this transform to work") !asName.metas.containsKey(IsSyntheticNameMeta.TAG) -> // If this meta is not present it would indicate that the alias was explicitly specified, which is // not allowed by SQL-92, so ignore. false // Group by expressions other than paths aren't part of SQL-92 so ignore - expr !is PartiqlAst.Expr.Path -> false - else -> true + expr !is PartiqlAst.Expr.Path -> false + else -> true } - } /** @@ -61,17 +61,17 @@ class GroupByPathExpressionVisitorTransform( */ fun collectAliases(fromSource: PartiqlAst.FromSource): List = when (fromSource) { - is PartiqlAst.FromSource.Scan -> + is PartiqlAst.FromSource.Scan -> listOf( fromSource.asAlias?.text - ?: errNoContext( - "FromSource.asAlias.text must be specified for this transform to work", - errorCode = ErrorCode.SEMANTIC_MISSING_AS_NAME, - internal = true - ) + ?: errNoContext( + "FromSource.asAlias.text must be specified for this transform to work", + errorCode = ErrorCode.SEMANTIC_MISSING_AS_NAME, + internal = true + ) ) - is PartiqlAst.FromSource.Join -> + is PartiqlAst.FromSource.Join -> collectAliases(fromSource.left) + collectAliases(fromSource.right) is PartiqlAst.FromSource.Unpivot -> @@ -93,7 +93,8 @@ class GroupByPathExpressionVisitorTransform( // A transformer for both of the sets of the substitutions defined above. val currentAndUnshadowedTransformer = GroupByPathExpressionVisitorTransform( - unshadowedSubstitutions + currentSubstitutions) + unshadowedSubstitutions + currentSubstitutions + ) // Now actually transform the query using the appropriate transformer for each of various clauses of the // SELECT statement. @@ -123,7 +124,8 @@ class GroupByPathExpressionVisitorTransform( order = order, offset = offset, limit = limit, - metas = metas) + metas = metas + ) } } @@ -140,8 +142,10 @@ class GroupByPathExpressionVisitorTransform( name = groupKey.asAlias.text, case = caseSensitive(), qualifier = unqualified(), - metas = groupKey.expr.metas + metaContainerOf(UniqueNameMeta.TAG to uniqueIdentifierMeta)) - }) + metas = groupKey.expr.metas + metaContainerOf(UniqueNameMeta.TAG to uniqueIdentifierMeta) + ) + } + ) }.associateBy { it.target } } @@ -165,5 +169,4 @@ class GroupByPathExpressionVisitorTransform( // do not transform CallAgg nodes. 
override fun transformExprCallAgg(node: PartiqlAst.Expr.CallAgg): PartiqlAst.Expr = node - } diff --git a/lang/src/org/partiql/lang/eval/visitors/PartiqlAstSanityValidator.kt b/lang/src/org/partiql/lang/eval/visitors/PartiqlAstSanityValidator.kt index 04dda6b3a0..c86dd7141a 100644 --- a/lang/src/org/partiql/lang/eval/visitors/PartiqlAstSanityValidator.kt +++ b/lang/src/org/partiql/lang/eval/visitors/PartiqlAstSanityValidator.kt @@ -56,21 +56,25 @@ class PartiqlAstSanityValidator : PartiqlAst.Visitor() { override fun visitExprLit(node: PartiqlAst.Expr.Lit) { val ionValue = node.value val metas = node.metas - if(node.value is IntElement && ionValue.integerSize == IntElementSize.BIG_INTEGER) { - throw EvaluationException(message = "Int overflow or underflow at compile time", + if (node.value is IntElement && ionValue.integerSize == IntElementSize.BIG_INTEGER) { + throw EvaluationException( + message = "Int overflow or underflow at compile time", errorCode = ErrorCode.SEMANTIC_LITERAL_INT_OVERFLOW, errorContext = errorContextFrom(metas), - internal = false) + internal = false + ) } } private fun validateDecimalOrNumericType(scale: LongPrimitive?, precision: LongPrimitive?, metas: MetaContainer) { if (scale != null && precision != null && compileOptions.typedOpBehavior == TypedOpBehavior.HONOR_PARAMETERS) { if (scale.value !in 0..precision.value) { - err("Scale ${scale.value} should be between 0 and precision ${precision.value}", + err( + "Scale ${scale.value} should be between 0 and precision ${precision.value}", errorCode = ErrorCode.SEMANTIC_INVALID_DECIMAL_ARGUMENTS, errorContext = errorContextFrom(metas), - internal = false) + internal = false + ) } } } @@ -87,10 +91,12 @@ class PartiqlAstSanityValidator : PartiqlAst.Visitor() { val setQuantifier = node.setq val metas = node.metas if (setQuantifier is PartiqlAst.SetQuantifier.Distinct && metas.containsKey(IsCountStarMeta.TAG)) { - err("COUNT(DISTINCT *) is not supported", + err( + "COUNT(DISTINCT *) is not supported", ErrorCode.EVALUATOR_COUNT_DISTINCT_STAR, errorContextFrom(metas), - internal = false) + internal = false + ) } } @@ -102,20 +108,26 @@ class PartiqlAstSanityValidator : PartiqlAst.Visitor() { if (groupBy != null) { if (groupBy.strategy is PartiqlAst.GroupingStrategy.GroupPartial) { - err("GROUP PARTIAL not supported yet", + err( + "GROUP PARTIAL not supported yet", ErrorCode.EVALUATOR_FEATURE_NOT_SUPPORTED_YET, errorContextFrom(metas).also { it[Property.FEATURE_NAME] = "GROUP PARTIAL" - }, internal = false) + }, + internal = false + ) } when (projection) { is PartiqlAst.Projection.ProjectPivot -> { - err("PIVOT with GROUP BY not supported yet", + err( + "PIVOT with GROUP BY not supported yet", ErrorCode.EVALUATOR_FEATURE_NOT_SUPPORTED_YET, errorContextFrom(metas).also { it[Property.FEATURE_NAME] = "PIVOT with GROUP BY" - }, internal = false) + }, + internal = false + ) } is PartiqlAst.Projection.ProjectValue, is PartiqlAst.Projection.ProjectList -> { // use of group by with SELECT & SELECT VALUE is supported @@ -124,9 +136,11 @@ class PartiqlAstSanityValidator : PartiqlAst.Visitor() { } if ((groupBy == null || groupBy.keyList.keys.isEmpty()) && having != null) { - throw SemanticException("HAVING used without GROUP BY (or grouping expressions)", - ErrorCode.SEMANTIC_HAVING_USED_WITHOUT_GROUP_BY, - PropertyValueMap().addSourceLocation(metas)) + throw SemanticException( + "HAVING used without GROUP BY (or grouping expressions)", + ErrorCode.SEMANTIC_HAVING_USED_WITHOUT_GROUP_BY, + PropertyValueMap().addSourceLocation(metas) + ) 
} } diff --git a/lang/src/org/partiql/lang/eval/visitors/PipelinedVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/PipelinedVisitorTransform.kt index cc1d9db48a..be340c4a10 100644 --- a/lang/src/org/partiql/lang/eval/visitors/PipelinedVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/PipelinedVisitorTransform.kt @@ -9,7 +9,7 @@ import org.partiql.lang.util.interruptibleFold * @param transformers visitor transforms to be executed */ class PipelinedVisitorTransform(vararg transformers: PartiqlAst.VisitorTransform) : PartiqlAst.VisitorTransform() { - private val transformerList = transformers.toList() + private val transformerList = transformers.toList() override fun transformStatement(node: PartiqlAst.Statement): PartiqlAst.Statement = transformerList.interruptibleFold(node) { diff --git a/lang/src/org/partiql/lang/eval/visitors/SelectListItemAliasVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/SelectListItemAliasVisitorTransform.kt index 4c87e70bce..333ebfd425 100644 --- a/lang/src/org/partiql/lang/eval/visitors/SelectListItemAliasVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/SelectListItemAliasVisitorTransform.kt @@ -38,7 +38,7 @@ class SelectListItemAliasVisitorTransform : VisitorTransformBase() { return PartiqlAst.build { projectList( projectItems = node.projectItems.mapIndexed { idx, it -> - when(it) { + when (it) { is PartiqlAst.ProjectItem.ProjectExpr -> when (it.asAlias) { // Synthesize a column name if one was not specified in the query. diff --git a/lang/src/org/partiql/lang/eval/visitors/SelectStarVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/SelectStarVisitorTransform.kt index d0bda96aa3..98fe62343a 100644 --- a/lang/src/org/partiql/lang/eval/visitors/SelectStarVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/SelectStarVisitorTransform.kt @@ -26,7 +26,8 @@ class SelectStarVisitorTransform : VisitorTransformBase() { order = node.order, limit = node.limit, offset = node.offset, - metas = node.metas) + metas = node.metas + ) } } @@ -37,7 +38,7 @@ class SelectStarVisitorTransform : VisitorTransformBase() { // Check if SELECT * is being used. 
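For context on the transform in this hunk: SELECT * is rewritten into an explicit projection over the FROM-source aliases (and, when a GROUP BY is present, over the group keys), which is why the alias-producing transforms must run first, as the error messages below spell out. A hedged sketch of driving it by hand; the query string is illustrative and SqlParser.parseAstStatement is assumed to be the 0.x parser entry point, while the normal path wires these steps together with PipelinedVisitorTransform:

    import com.amazon.ion.system.IonSystemBuilder
    import org.partiql.lang.eval.visitors.FromSourceAliasVisitorTransform
    import org.partiql.lang.eval.visitors.SelectStarVisitorTransform
    import org.partiql.lang.syntax.SqlParser

    fun main() {
        val ion = IonSystemBuilder.standard().build()
        val ast = SqlParser(ion).parseAstStatement("SELECT * FROM orders AS o")

        // Aliases must exist before the star can be expanded.
        val aliased = FromSourceAliasVisitorTransform().transformStatement(ast)
        val expanded = SelectStarVisitorTransform().transformStatement(aliased)

        // The projection is now an explicit project list over the alias `o` (roughly SELECT o.*).
        println(expanded)
    }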
if (projection is PartiqlAst.Projection.ProjectStar) { - when (transformedExpr.group) { // No group by + when (transformedExpr.group) { // No group by null -> { val fromSourceAliases = extractAliases(transformedExpr.from) @@ -55,13 +56,14 @@ class SelectStarVisitorTransform : VisitorTransformBase() { } return copyProjectionToSelect(transformedExpr, newProjection) } - else -> { // With group by + else -> { // With group by val selectListItemsFromGroupBy = transformedExpr.group.keyList.keys.map { val asName = it.asAlias ?: errNoContext( "GroupByItem has no AS-alias--GroupByItemAliasVisitorTransform must be executed before SelectStarVisitorTransform", errorCode = ErrorCode.SEMANTIC_MISSING_AS_NAME, - internal = true) + internal = true + ) // We need to take the unique name of each grouping field key only because we need to handle // the case when multiple grouping fields are assigned the same name (which is currently allowed) @@ -106,7 +108,9 @@ class SelectStarVisitorTransform : VisitorTransformBase() { node.asAlias?.text ?: error("FromSourceAliasVisitorTransform must be executed before SelectStarVisitorTransform"), node.atAlias?.text, - node.byAlias?.text)) + node.byAlias?.text + ) + ) } override fun visitFromSourceUnpivot(node: PartiqlAst.FromSource.Unpivot) { @@ -115,7 +119,9 @@ class SelectStarVisitorTransform : VisitorTransformBase() { node.asAlias?.text ?: error("FromSourceAliasVisitorTransform must be executed before SelectStarVisitorTransform"), node.atAlias?.text, - node.byAlias?.text)) + node.byAlias?.text + ) + ) } /** We do not want to recurse into the nested select query */ diff --git a/lang/src/org/partiql/lang/eval/visitors/StaticTypeVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/StaticTypeVisitorTransform.kt index d3771c7474..7a01955427 100644 --- a/lang/src/org/partiql/lang/eval/visitors/StaticTypeVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/StaticTypeVisitorTransform.kt @@ -58,9 +58,11 @@ enum class StaticTypeVisitorTransformConstraints { * perspective. * @param constraints Additional constraints on what variable scoping, or other rules should be followed. */ -class StaticTypeVisitorTransform(private val ion: IonSystem, - globalBindings: Bindings, - constraints: Set = setOf()) : VisitorTransformBase() { +class StaticTypeVisitorTransform( + private val ion: IonSystem, + globalBindings: Bindings, + constraints: Set = setOf() +) : VisitorTransformBase() { /** Used to allow certain binding lookups to occur directly in the global scope. */ private val globalEnv = wrapBindings(globalBindings, 0) @@ -110,8 +112,10 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, * - 1 is the top-most statement with a `FROM` clause (i.e. select-from-where or DML operation), * - Values > 1 are for each subsequent level of nested sub-query. */ - private inner class VisitorTransform(private val parentEnv: Bindings, - private val currentScopeDepth: Int) : VisitorTransformBase() { + private inner class VisitorTransform( + private val parentEnv: Bindings, + private val currentScopeDepth: Int + ) : VisitorTransformBase() { /** Specifies the current scope search order--default is LEXICAL. */ private var scopeOrder = ScopeSearchOrder.LEXICAL @@ -137,14 +141,16 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, private var singleFromSourceName: String? 
= null private fun singleFromSourceRef(sourceName: String, metas: MetaContainer): PartiqlAst.Expr.Id { - val sourceType = currentEnv[BindingName(sourceName, BindingCase.SENSITIVE)] ?: - throw IllegalArgumentException("Could not find type for single FROM source variable") + val sourceType = currentEnv[BindingName(sourceName, BindingCase.SENSITIVE)] + ?: throw IllegalArgumentException("Could not find type for single FROM source variable") return PartiqlAst.build { - id(sourceName, + id( + sourceName, caseSensitive(), localsFirst(), - metas + metaContainerOf(StaticTypeMeta.TAG to StaticTypeMeta(sourceType.type))) + metas + metaContainerOf(StaticTypeMeta.TAG to StaticTypeMeta(sourceType.type)) + ) } } @@ -156,7 +162,7 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, private fun errUnboundName(name: String, case: PartiqlAst.CaseSensitivity, metas: MetaContainer): Nothing = throw SemanticException( "No such variable named '$name'", - when(case) { + when (case) { is PartiqlAst.CaseSensitivity.CaseInsensitive -> ErrorCode.SEMANTIC_UNBOUND_BINDING is PartiqlAst.CaseSensitivity.CaseSensitive -> ErrorCode.SEMANTIC_UNBOUND_QUOTED_BINDING }, @@ -210,7 +216,8 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, node.setq, node.funcName, transformExpr(node.arg), - transformMetas(node.metas)) + transformMetas(node.metas) + ) } } @@ -220,10 +227,10 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, else -> { val (type, depth) = match val scope = when { - depth == 0 -> BindingScope.GLOBAL - depth < currentScopeDepth -> BindingScope.LEXICAL + depth == 0 -> BindingScope.GLOBAL + depth < currentScopeDepth -> BindingScope.LEXICAL depth == currentScopeDepth -> BindingScope.LOCAL - else -> error("Unexpected: depth should never be > currentScopeDepth") + else -> error("Unexpected: depth should never be > currentScopeDepth") } TypeAndScope(type, scope) } @@ -237,11 +244,11 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, */ private fun findBind(bindingName: BindingName, scopeQualifier: PartiqlAst.ScopeQualifier): TypeAndScope? { // Override the current scope search order if the var is lexically qualified. - val overridenScopeSearchOrder = when(scopeQualifier) { + val overridenScopeSearchOrder = when (scopeQualifier) { is PartiqlAst.ScopeQualifier.LocalsFirst -> ScopeSearchOrder.LEXICAL is PartiqlAst.ScopeQualifier.Unqualified -> this.scopeOrder } - val scopes: List> = when(overridenScopeSearchOrder) { + val scopes: List> = when (overridenScopeSearchOrder) { ScopeSearchOrder.GLOBALS_THEN_LEXICAL -> listOf(globalEnv, currentEnv) ScopeSearchOrder.LEXICAL -> listOf(currentEnv, globalEnv) } @@ -284,10 +291,10 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, // If we found a variable in the global scope but a there is a single // from source, we should transform to this to path expression anyway and pretend // we didn't match the global variable. 
- singleBinding != null -> { + singleBinding != null -> { return makePathIntoFromSource(singleBinding, node) } - preventGlobalsExceptInFrom && fromVisited -> { + preventGlobalsExceptInFrom && fromVisited -> { errIllegalGlobalVariableAccess(bindingName.name, node.metas) } preventGlobalsInNestedQueries && currentScopeDepth > 1 -> { @@ -296,14 +303,16 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, } } - val newScopeQualifier = when(found.scope) { + val newScopeQualifier = when (found.scope) { BindingScope.LOCAL, BindingScope.LEXICAL -> PartiqlAst.build { localsFirst() } BindingScope.GLOBAL -> PartiqlAst.build { unqualified() } } return PartiqlAst.build { - id_(node.name, node.case, newScopeQualifier, - node.metas + metaContainerOf(StaticTypeMeta.TAG to StaticTypeMeta(found.type))) + id_( + node.name, node.case, newScopeQualifier, + node.metas + metaContainerOf(StaticTypeMeta.TAG to StaticTypeMeta(found.type)) + ) } } @@ -316,7 +325,8 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, path( singleFromSourceRef(fromSourceAlias, node.extractSourceLocation()), listOf(node.toPathExpr()), - node.extractSourceLocation()) + node.extractSourceLocation() + ) } } @@ -338,8 +348,8 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, else -> it } } - else -> super.transformExprPath(node) - } + else -> super.transformExprPath(node) + } override fun transformFromSourceScan(node: PartiqlAst.FromSource.Scan): PartiqlAst.FromSource { // we need to transform the source expression before binding the names to our scope @@ -354,8 +364,10 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, } val asSymbolicName = node.asAlias - ?: error("fromSourceLet.variables.asName is null. This wouldn't be the case if " + - "FromSourceAliasVisitorTransform was executed first.") + ?: error( + "fromSourceLet.variables.asName is null. This wouldn't be the case if " + + "FromSourceAliasVisitorTransform was executed first." + ) addLocal(asSymbolicName.text, StaticType.ANY, asSymbolicName.metas) @@ -381,8 +393,10 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, } val asSymbolicName = node.asAlias - ?: error("fromSourceLet.variables.asName is null. This wouldn't be the case if " + - "FromSourceAliasVisitorTransform was executed first.") + ?: error( + "fromSourceLet.variables.asName is null. This wouldn't be the case if " + + "FromSourceAliasVisitorTransform was executed first." + ) addLocal(asSymbolicName.text, StaticType.ANY, asSymbolicName.metas) @@ -474,7 +488,8 @@ class StaticTypeVisitorTransform(private val ion: IonSystem, dropIndex( node.table, node.keys, - transformMetas(node.metas)) + transformMetas(node.metas) + ) } } diff --git a/lang/src/org/partiql/lang/eval/visitors/SubstitutionVisitorTransform.kt b/lang/src/org/partiql/lang/eval/visitors/SubstitutionVisitorTransform.kt index ef3434b32e..301af664de 100644 --- a/lang/src/org/partiql/lang/eval/visitors/SubstitutionVisitorTransform.kt +++ b/lang/src/org/partiql/lang/eval/visitors/SubstitutionVisitorTransform.kt @@ -35,7 +35,7 @@ data class SubstitutionPair(val target: PartiqlAst.Expr, val replacement: Partiq * * This class is `open` to allow subclasses to restrict the nodes to which the substitution should occur. 
*/ -open class SubstitutionVisitorTransform(protected val substitutions: Map): VisitorTransformBase() { +open class SubstitutionVisitorTransform(protected val substitutions: Map) : VisitorTransformBase() { /** * If [node] matches any of the target nodes in [substitutions], replaces the node with the replacement. @@ -48,7 +48,8 @@ open class SubstitutionVisitorTransform(protected val substitutions: Map node.extractSourceLocation().let { - sl -> MetaVisitorTransform(sl).transformExpr(ms.replacement) + sl -> + MetaVisitorTransform(sl).transformExpr(ms.replacement) } } ?: super.transformExpr(node) } @@ -62,5 +63,4 @@ open class SubstitutionVisitorTransform(protected val substitutions: Map StaticType.unionOf(type, StaticType.NULL).flatten() } - /** * This class is responsible for mapping Ion Schema type definition(s) to PartiQL StaticType(s) * using an instance of IonSchemaModel.Schema (schema model generated by Ion Schema Library) @@ -97,7 +96,7 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { is IonSchemaModel.Constraint.AnyOf -> return AnyOfType(typeConstraint.types.map { it.toStaticType(currentTopLevelTypeName) }.toSet(), metas) else -> error("This block should be unreachable") } - + // Create StaticType based on core ISL type return when (coreType) { is StringType -> StringType(getStringLengthConstraint(), metas) @@ -194,7 +193,7 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { constraints.getConstraint(IonSchemaModel.Constraint.Element::class)?.type?.toStaticType(topLevelTypeName) ?: StaticType.ANY private fun IonSchemaModel.TypeDefinition.getStringLengthConstraint(): StringType.StringLengthConstraint = - when (val constraint = constraints.getConstraint(IonSchemaModel.Constraint.CodepointLength::class)?.rule){ + when (val constraint = constraints.getConstraint(IonSchemaModel.Constraint.CodepointLength::class)?.rule) { null -> StringType.StringLengthConstraint.Unconstrained else -> constraint.toStringLengthConstraint() } @@ -275,23 +274,29 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { // - Precision ranges starting from 1 (inclusive), 0 (exclusive), or min is IonSchemaModel.NumberRule.EqualsNumber -> when (val exactPrecision = rule.value.toInt()) { 1 -> exactPrecision - else -> error("Exact decimal precision of $exactPrecision can't map to DecimalType. " + - "Only exact precisions of 1 are allowed.") + else -> error( + "Exact decimal precision of $exactPrecision can't map to DecimalType. " + + "Only exact precisions of 1 are allowed." + ) } is IonSchemaModel.NumberRule.EqualsRange -> { when (val minPrecision = rule.range.min) { is IonSchemaModel.NumberExtent.Inclusive -> { val minPrecisionValue = minPrecision.value.toInt() if (minPrecisionValue != 1) { - error("Inclusive precision range min of $minPrecisionValue can't map to DecimalType. " + - "Only inclusive precision range mins of 1 are allowed.") + error( + "Inclusive precision range min of $minPrecisionValue can't map to DecimalType. " + + "Only inclusive precision range mins of 1 are allowed." + ) } } is IonSchemaModel.NumberExtent.Exclusive -> { val minPrecisionValue = minPrecision.value.toInt() if (minPrecisionValue != 0) { - error("Exclusive precision range min of $minPrecisionValue can't map to DecimalType. " + - "Only exclusive precision range mins of 0 are allowed") + error( + "Exclusive precision range min of $minPrecisionValue can't map to DecimalType. 
" + + "Only exclusive precision range mins of 0 are allowed" + ) } } is IonSchemaModel.NumberExtent.Max -> error("Min value of a range cannot be 'max'") @@ -327,7 +332,7 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { } } - private fun AnyElement.toInt() : Int = this.longValue.toIntExact() + private fun AnyElement.toInt(): Int = this.longValue.toIntExact() /** * Returns fields map for Struct Type, if present. Otherwise, returns an empty map @@ -349,7 +354,7 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { else -> StaticType.unionOf(st, StaticType.MISSING, metas = st.metas) } } - + /** * Recursively accumulates all top-level types present in the type definition */ @@ -366,7 +371,7 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { } return current.distinct() } - + /** * Recursively accumulates all top-level types present in the type reference */ @@ -384,7 +389,7 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { * Returns true if type definition has an occurs constraint as 'required' */ private fun IonSchemaModel.TypeDefinition.isRequired() = - constraints.getConstraint(IonSchemaModel.Constraint.Occurs::class)?.spec is IonSchemaModel.OccursSpec.OccursRequired + constraints.getConstraint(IonSchemaModel.Constraint.Occurs::class)?.spec is IonSchemaModel.OccursSpec.OccursRequired /** * Returns true if type definition does not allow open content. @@ -404,4 +409,3 @@ class StaticTypeMapper(schema: IonSchemaModel.Schema) { return null } } - diff --git a/lang/src/org/partiql/lang/partiqlisl/ResourceAuthority.kt b/lang/src/org/partiql/lang/partiqlisl/ResourceAuthority.kt index d1fb91e030..11d381a2df 100644 --- a/lang/src/org/partiql/lang/partiqlisl/ResourceAuthority.kt +++ b/lang/src/org/partiql/lang/partiqlisl/ResourceAuthority.kt @@ -8,14 +8,14 @@ import com.amazon.ionschema.util.CloseableIterator import java.io.InputStream class ResourceAuthority( - val rootPackage: String, - val classLoader: ClassLoader, -val ion: IonSystem + val rootPackage: String, + val classLoader: ClassLoader, + val ion: IonSystem ) : Authority { override fun iteratorFor(iss: IonSchemaSystem, id: String): CloseableIterator { - val resourceName = "${rootPackage}/$id" + val resourceName = "$rootPackage/$id" var str: InputStream? = classLoader.getResourceAsStream(resourceName) - ?: error("Failed to load schema with resource name '$resourceName'") + ?: error("Failed to load schema with resource name '$resourceName'") return object : CloseableIterator { @@ -40,4 +40,4 @@ val ion: IonSystem fun getResourceAuthority(ion: IonSystem) = ResourceAuthority("org/partiql/schemas", ResourceAuthority::class.java.getClassLoader(), ion) -fun loadPartiqlIsl(iss: IonSchemaSystem) = iss.loadSchema("partiql.isl") \ No newline at end of file +fun loadPartiqlIsl(iss: IonSchemaSystem) = iss.loadSchema("partiql.isl") diff --git a/lang/src/org/partiql/lang/schemadiscovery/ConstraintDiscoverer.kt b/lang/src/org/partiql/lang/schemadiscovery/ConstraintDiscoverer.kt index 60b61d606c..f308b84997 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/ConstraintDiscoverer.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/ConstraintDiscoverer.kt @@ -42,7 +42,7 @@ internal interface ConstraintDiscoverer { } /** - * An implementation of [ConstraintDiscoverer] that supports all [IonType]s except for DATAGRAM. All base + * An implementation of [ConstraintDiscoverer] that supports all [IonType]s except for DATAGRAM. 
All base * implementations return an empty [IonSchemaModel.ConstraintList] and do not depend on each other (i.e. sequence * and struct constraint discoverers do not call the scalar constraint discoverers). * @@ -50,7 +50,7 @@ internal interface ConstraintDiscoverer { * [IonSchemaModel.Constraint.TypeConstraint], typed nulls collapse to `null` and will not have any additional * constraints discovered. */ -internal open class TypeConstraintDiscoverer: ConstraintDiscoverer { +internal open class TypeConstraintDiscoverer : ConstraintDiscoverer { override fun discover(value: IonValue): IonSchemaModel.ConstraintList = when (value) { is IonBool -> constraintDiscovererBool(value) @@ -75,7 +75,7 @@ internal open class TypeConstraintDiscoverer: ConstraintDiscoverer { * This implementation returns an empty constraint list. */ open fun constraintDiscovererBool(value: IonBool): IonSchemaModel.ConstraintList = emptyConstraintList - + /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonInt]s. * @@ -95,70 +95,70 @@ internal open class TypeConstraintDiscoverer: ConstraintDiscoverer { * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererDecimal(value: IonDecimal): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererDecimal(value: IonDecimal): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonTimestamp]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererTimestamp(value: IonTimestamp): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererTimestamp(value: IonTimestamp): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonSymbol]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererSymbol(value: IonSymbol): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererSymbol(value: IonSymbol): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonString]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererString(value: IonString): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererString(value: IonString): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonClob]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererClob(value: IonClob): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererClob(value: IonClob): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonBlob]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererBlob(value: IonBlob): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererBlob(value: IonBlob): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonNull]s. * * This implementation returns an empty constraint list. 
*/ - open fun constraintDiscovererNull(value: IonNull): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererNull(value: IonNull): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonSexp]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererSexp(value: IonSexp): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererSexp(value: IonSexp): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonList]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererList(value: IonList): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererList(value: IonList): IonSchemaModel.ConstraintList = emptyConstraintList /** * Returns a [IonSchemaModel.ConstraintList] with additional discovered constraints for [IonStruct]s. * * This implementation returns an empty constraint list. */ - open fun constraintDiscovererStruct(value: IonStruct): IonSchemaModel.ConstraintList = emptyConstraintList + open fun constraintDiscovererStruct(value: IonStruct): IonSchemaModel.ConstraintList = emptyConstraintList } /** @@ -171,7 +171,7 @@ internal open class TypeConstraintDiscoverer: ConstraintDiscoverer { * some specific discovered constraints can be done through overriding [TypeConstraintDiscoverer]'s extensible * `constraintDiscoverer...` functions. */ -internal class StandardConstraintDiscoverer: TypeConstraintDiscoverer() { +internal class StandardConstraintDiscoverer : TypeConstraintDiscoverer() { override fun constraintDiscovererInt(value: IonInt) = INT_VALID_VALUES_DISCOVERER(value) override fun constraintDiscovererDecimal(value: IonDecimal) = DECIMAL_SCALE_AND_PRECISION_DISCOVERER(value) override fun constraintDiscovererString(value: IonString) = STRING_CODEPOINT_LENGTH_DISCOVERER(value) @@ -191,7 +191,7 @@ internal val INT_VALID_VALUES_DISCOVERER = { value: IonInt -> in INT2_RANGE -> constraintList(INT2_RANGE_CONSTRAINT) in INT4_RANGE -> constraintList(INT4_RANGE_CONSTRAINT) in INT8_RANGE -> constraintList(INT8_RANGE_CONSTRAINT) - else -> constraintList() // unconstrained int has no constraint added + else -> constraintList() // unconstrained int has no constraint added } } } diff --git a/lang/src/org/partiql/lang/schemadiscovery/ConstraintInferer.kt b/lang/src/org/partiql/lang/schemadiscovery/ConstraintInferer.kt index 3a9274e00f..c15ec12514 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/ConstraintInferer.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/ConstraintInferer.kt @@ -51,7 +51,7 @@ internal class TypeAndConstraintInferer( val constraintUnifier: ConstraintUnifier, val constraintDiscoverer: ConstraintDiscoverer = StandardConstraintDiscoverer(), private val importedTypes: List = emptyList() -): ConstraintInferer { +) : ConstraintInferer { private val nullNamedType = IonSchemaModel.build { namedType("\$null", nullable = ionBool(true)) } private val nullNamedTypeConstraintList = IonSchemaModel.build { constraintList(typeConstraint(nullNamedType)) } private val notNullable = ionBool(false) @@ -174,8 +174,9 @@ internal class TypeAndConstraintInferer( } val structConstraints = mutableListOf( - IonSchemaModel.build { typeConstraint(namedType(TypeConstraint.STRUCT.typeName, notNullable)) }, - IonSchemaModel.build { closedContent() }) + IonSchemaModel.build { 
typeConstraint(namedType(TypeConstraint.STRUCT.typeName, notNullable)) }, + IonSchemaModel.build { closedContent() } + ) if (fields.isNotEmpty()) { structConstraints.add( diff --git a/lang/src/org/partiql/lang/schemadiscovery/ConstraintUnifier.kt b/lang/src/org/partiql/lang/schemadiscovery/ConstraintUnifier.kt index 6b1a06e513..8e40f0b0c5 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/ConstraintUnifier.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/ConstraintUnifier.kt @@ -47,21 +47,23 @@ internal enum class ConflictStrategy { */ internal enum class StructBehavior { INTERSECTION { - override fun unifyStructs(unifier: ConstraintUnifier, - structA: IonSchemaModel.ConstraintList, - structB: IonSchemaModel.ConstraintList): IonSchemaModel.ConstraintList { + override fun unifyStructs( + unifier: ConstraintUnifier, + structA: IonSchemaModel.ConstraintList, + structB: IonSchemaModel.ConstraintList + ): IonSchemaModel.ConstraintList { TODO("Not yet implemented") } }, UNION { - override fun unifyStructs(unifier: ConstraintUnifier, - structA: IonSchemaModel.ConstraintList, - structB: IonSchemaModel.ConstraintList): IonSchemaModel.ConstraintList { + override fun unifyStructs( + unifier: ConstraintUnifier, + structA: IonSchemaModel.ConstraintList, + structB: IonSchemaModel.ConstraintList + ): IonSchemaModel.ConstraintList { if (structA.isEmptyStruct()) { return structB.addClosedContentConstraint() - } - - else if (structB.isEmptyStruct()) { + } else if (structB.isEmptyStruct()) { return structA.addClosedContentConstraint() } @@ -80,9 +82,7 @@ internal enum class StructBehavior { unifiedFields.add(unifiedField) } // Otherwise, has same name and value type - } - - else { + } else { // `structA` doesn't have `structB`'s field unifiedFields.add(bField) } @@ -93,16 +93,20 @@ internal enum class StructBehavior { } }, INTERSECTION_AS_REQUIRED { - override fun unifyStructs(unifier: ConstraintUnifier, - structA: IonSchemaModel.ConstraintList, - structB: IonSchemaModel.ConstraintList): IonSchemaModel.ConstraintList { + override fun unifyStructs( + unifier: ConstraintUnifier, + structA: IonSchemaModel.ConstraintList, + structB: IonSchemaModel.ConstraintList + ): IonSchemaModel.ConstraintList { TODO("Not yet implemented") } }; - abstract fun unifyStructs(unifier: ConstraintUnifier, - structA: IonSchemaModel.ConstraintList, - structB: IonSchemaModel.ConstraintList): IonSchemaModel.ConstraintList + abstract fun unifyStructs( + unifier: ConstraintUnifier, + structA: IonSchemaModel.ConstraintList, + structB: IonSchemaModel.ConstraintList + ): IonSchemaModel.ConstraintList } /** @@ -157,7 +161,7 @@ private class ConstraintUnifierImpl( val conflictStrategy: ConflictStrategy, val structBehavior: StructBehavior, val discoveredConstraintUnifier: DiscoveredConstraintUnifier -): ConstraintUnifier { +) : ConstraintUnifier { /** * Unifies [aConstraints] with [bConstraints]. 
*/ @@ -190,8 +194,7 @@ private class ConstraintUnifierImpl( return if (aTypeName == bTypeName) { IonSchemaModel.build { unifyNonUnionTypes(aConstraints, bConstraints) } - } - else { + } else { // typenames of `aConstraints` and `bConstraints` are different so create union IonSchemaModel.build { constraintList(anyOf(aConstraints.toTypeReference(), bConstraints.toTypeReference())) @@ -216,9 +219,7 @@ private class ConstraintUnifierImpl( // items.size == 1 -> no element type so is empty sequence if (sequenceA.items.size == 1) { return sequenceB - } - - else if (sequenceB.items.size == 1) { + } else if (sequenceB.items.size == 1) { return sequenceA } @@ -236,8 +237,10 @@ private class ConstraintUnifierImpl( } return IonSchemaModel.build { - constraintList(typeConstraint(namedType(aTypeName, notNullable)), - element(inlineType(typeDefinition(name = null, constraints = elementTypeConstraints), notNullable))) + constraintList( + typeConstraint(namedType(aTypeName, notNullable)), + element(inlineType(typeDefinition(name = null, constraints = elementTypeConstraints), notNullable)) + ) } } diff --git a/lang/src/org/partiql/lang/schemadiscovery/DiscoveredConstraintUnifier.kt b/lang/src/org/partiql/lang/schemadiscovery/DiscoveredConstraintUnifier.kt index 55a2075b05..28a835e493 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/DiscoveredConstraintUnifier.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/DiscoveredConstraintUnifier.kt @@ -9,8 +9,8 @@ import org.partiql.ionschema.model.IonSchemaModel * - [IonSchemaModel.Constraint.Fields] for structs * - [IonSchemaModel.Constraint.ClosedContent] for structs * - [IonSchemaModel.Constraint.Element] for sequences). - * - * This is intended to be called by a [ConstraintUnifier] when unifying + * + * This is intended to be called by a [ConstraintUnifier] when unifying * - discovered constraints only ([MultipleTypedDCU]) * - discovered with definite constraints ([AppendAdditionalConstraints]) */ @@ -20,14 +20,14 @@ internal fun interface DiscoveredConstraintUnifier { /** * Represents a [DiscoveredConstraintUnifier] where each [IonSchemaModel.ConstraintList] to unify has a - * [IonSchemaModel.Constraint.TypeConstraint] with [typeName]. This is intended to be used when creating + * [IonSchemaModel.Constraint.TypeConstraint] with [typeName]. This is intended to be used when creating * [MultipleTypedDCU]. */ internal data class SingleTypedDCU(val typeName: String, val unifyFunc: DiscoveredConstraintUnifier) /** - * For two conflicting constraint lists, `a` and `b`, unifies discovered constraints based on [constraintUnifiers]. - * If `a`/`b`'s type name matches one of the [constraintUnifiers]' [SingleTypedDCU.typeName]s, then `a` and `b` are + * For two conflicting constraint lists, `a` and `b`, unifies discovered constraints based on [constraintUnifiers]. + * If `a`/`b`'s type name matches one of the [constraintUnifiers]' [SingleTypedDCU.typeName]s, then `a` and `b` are * unified with that corresponding unifier. Otherwise, an empty constraint list is returned. 
* * @exception IllegalArgumentException if any of [constraintUnifiers] have the same @@ -35,7 +35,7 @@ internal data class SingleTypedDCU(val typeName: String, val unifyFunc: Discover */ internal class MultipleTypedDCU( private val constraintUnifiers: List = standardTypedDiscoveredConstraintUnifiers -): DiscoveredConstraintUnifier { +) : DiscoveredConstraintUnifier { private val discoveredConstraintUnifierMapping = initializeMapping() private fun initializeMapping(): Map { @@ -62,12 +62,12 @@ internal class MultipleTypedDCU( * For two conflicting constraint lists, `a` and `b`, appends `b`'s constraints not found in `a`. Any constraints that * are found in `a` and `b` will return `a`'s constraint. */ -internal class AppendAdditionalConstraints: DiscoveredConstraintUnifier { +internal class AppendAdditionalConstraints : DiscoveredConstraintUnifier { private fun IonSchemaModel.Constraint.isDiscoveredConstraint(): Boolean { - return this !is IonSchemaModel.Constraint.TypeConstraint - && this !is IonSchemaModel.Constraint.ClosedContent - && this !is IonSchemaModel.Constraint.Fields - && this !is IonSchemaModel.Constraint.Element + return this !is IonSchemaModel.Constraint.TypeConstraint && + this !is IonSchemaModel.Constraint.ClosedContent && + this !is IonSchemaModel.Constraint.Fields && + this !is IonSchemaModel.Constraint.Element } override fun invoke(a: IonSchemaModel.ConstraintList, b: IonSchemaModel.ConstraintList): IonSchemaModel.ConstraintList { @@ -154,4 +154,5 @@ internal val standardTypedDiscoveredConstraintUnifiers = listOf( INT_VALID_VALUES_UNIFIER, DECIMAL_SCALE_AND_PRECISION_UNIFIER, - STRING_CODEPOINT_LENGTH_UNIFIER) + STRING_CODEPOINT_LENGTH_UNIFIER + ) diff --git a/lang/src/org/partiql/lang/schemadiscovery/IonExampleParser.kt b/lang/src/org/partiql/lang/schemadiscovery/IonExampleParser.kt index 50630bd29d..d78881e00e 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/IonExampleParser.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/IonExampleParser.kt @@ -15,6 +15,4 @@ class IonExampleParser(val ion: IonSystem) { reader.next() ?: return null return ion.newValue(reader) } - - -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/schemadiscovery/NormalizeNullableVisitorTransform.kt b/lang/src/org/partiql/lang/schemadiscovery/NormalizeNullableVisitorTransform.kt index 6cef8b3bc8..8df7ae0cd9 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/NormalizeNullableVisitorTransform.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/NormalizeNullableVisitorTransform.kt @@ -61,7 +61,7 @@ class NormalizeNullableVisitorTransform : IonSchemaModel.VisitorTransform() { val anyOfTypes = node.getAnyOfConstraint().types if (anyOfTypes.any { it.isNullable() }) { val newAnyOf = anyOfTypes.filter { it != nullNamedType } - .map { transformTypeReference(it.toNullable()) } + .map { transformTypeReference(it.toNullable()) } return if (newAnyOf.size == 1) { newAnyOf.first().toConstraintList().items diff --git a/lang/src/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleImpl.kt b/lang/src/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleImpl.kt index ab0ba5ee1e..74e2b418a8 100644 --- a/lang/src/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleImpl.kt +++ b/lang/src/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleImpl.kt @@ -15,7 +15,7 @@ import org.partiql.lang.util.stringValueOrNull * generated schema. 
The passed [schemaIds] will also be used for the generated * [IonSchemaModel.SchemaStatement.HeaderStatement]'s [IonSchemaModel.ImportList]. */ -class SchemaInferencerFromExampleImpl(val typeName: String, iss: IonSchemaSystem, val schemaIds: List): SchemaInferencerFromExample { +class SchemaInferencerFromExampleImpl(val typeName: String, iss: IonSchemaSystem, val schemaIds: List) : SchemaInferencerFromExample { private val importedTypes = schemaIds.loadImportedTypes(iss) private val sequenceTypes = importedTypes.loadSequenceTypes() private val islAnyConstraints = IonSchemaModel.build { constraintList() } @@ -23,7 +23,8 @@ class SchemaInferencerFromExampleImpl(val typeName: String, iss: IonSchemaSystem schema( headerStatement(openFieldList(), importList(schemaIds.map { import(it) })), typeStatement(typeDefinition(typeName, islAnyConstraints)), - footerStatement(openFieldList())) + footerStatement(openFieldList()) + ) } override fun inferFromExamples(reader: IonReader, maxExampleCount: Int, definiteISL: IonSchemaModel.Schema?): IonSchemaModel.Schema { @@ -51,7 +52,8 @@ class SchemaInferencerFromExampleImpl(val typeName: String, iss: IonSchemaSystem val dataguideInferer = TypeAndConstraintInferer( constraintUnifier = dataguideConstraintUnifier, constraintDiscoverer = StandardConstraintDiscoverer(), - importedTypes = importedTypes) + importedTypes = importedTypes + ) val discoveredWithDefiniteUnifier = ConstraintUnifier.builder() .sequenceTypes(sequenceTypes) @@ -69,9 +71,12 @@ class SchemaInferencerFromExampleImpl(val typeName: String, iss: IonSchemaSystem val definiteSchemaTypeStatement = definiteISL.getFirstTypeStatement() val definiteISLTopTypeName = definiteSchemaTypeStatement.typeDef.name?.text if (typeName != definiteISLTopTypeName) { - error("""Top level type name differs. + error( + """Top level type name differs. Expected: $typeName - Actual: $definiteISLTopTypeName""".trimIndent()) + Actual: $definiteISLTopTypeName + """.trimIndent() + ) } discoveredWithDefiniteUnifier.unify(discoveredConstraints, definiteSchemaTypeStatement.typeDef.constraints) } diff --git a/lang/src/org/partiql/lang/syntax/Exceptions.kt b/lang/src/org/partiql/lang/syntax/Exceptions.kt index 4250497e16..ba06ba7d31 100644 --- a/lang/src/org/partiql/lang/syntax/Exceptions.kt +++ b/lang/src/org/partiql/lang/syntax/Exceptions.kt @@ -18,25 +18,29 @@ import org.partiql.lang.SqlException import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.PropertyValueMap - - /** Root exception type for syntactic problems. */ -open class SyntaxException(message: String = "", - errorCode: ErrorCode, - errorContext: PropertyValueMap, - cause: Throwable? = null) - : SqlException(message, errorCode, errorContext, cause) +open class SyntaxException( + message: String = "", + errorCode: ErrorCode, + errorContext: PropertyValueMap, + cause: Throwable? = null +) : + SqlException(message, errorCode, errorContext, cause) /** Error in the Lexer. */ -open class LexerException(message: String = "", - errorCode: ErrorCode, - errorContext: PropertyValueMap, - cause: Throwable? = null) - : SyntaxException(message, errorCode, errorContext, cause) +open class LexerException( + message: String = "", + errorCode: ErrorCode, + errorContext: PropertyValueMap, + cause: Throwable? = null +) : + SyntaxException(message, errorCode, errorContext, cause) /** Error in the parser. */ -open class ParserException(message: String = "", - errorCode: ErrorCode, - errorContext: PropertyValueMap, - cause: Throwable? 
= null) - : SyntaxException(message, errorCode, errorContext, cause) \ No newline at end of file +open class ParserException( + message: String = "", + errorCode: ErrorCode, + errorContext: PropertyValueMap, + cause: Throwable? = null +) : + SyntaxException(message, errorCode, errorContext, cause) diff --git a/lang/src/org/partiql/lang/syntax/LexerConstants.kt b/lang/src/org/partiql/lang/syntax/LexerConstants.kt index 12b588b870..45cdf0d85c 100644 --- a/lang/src/org/partiql/lang/syntax/LexerConstants.kt +++ b/lang/src/org/partiql/lang/syntax/LexerConstants.kt @@ -309,16 +309,16 @@ internal val DATE_TIME_PART_KEYWORDS: Set = DateTimePart.values() /** Keywords that are aliases for type keywords. */ @JvmField internal val TYPE_ALIASES = mapOf( - "varchar" to "character_varying", - "char" to "character", - "dec" to "decimal", - "int" to "integer", - "int2" to "smallint", - "integer2" to "smallint", - "int4" to "integer4", - "int8" to "integer8", - "bigint" to "integer8", - "bool" to "boolean" + "varchar" to "character_varying", + "char" to "character", + "dec" to "decimal", + "int" to "integer", + "int2" to "smallint", + "integer2" to "smallint", + "int4" to "integer4", + "int8" to "integer8", + "bigint" to "integer8", + "bool" to "boolean" ) /** @@ -327,40 +327,40 @@ internal val DATE_TIME_PART_KEYWORDS: Set = DateTimePart.values() * don't require that. */ @JvmField internal val CORE_TYPE_NAME_ARITY_MAP = mapOf( - "missing" to 0..0, // PartiQL - "null" to 0..0, // Ion - "boolean" to 0..0, // Ion & SQL-99 - "smallint" to 0..0, // SQL-92 - "integer4" to 0..0, // PartiQL - "integer8" to 0..0, // PartiQL - "integer" to 0..0, // Ion & SQL-92 - "float" to 0..1, // Ion & SQL-92 - "real" to 0..0, // SQL-92 - "double_precision" to 0..0, // SQL-92 - "decimal" to 0..2, // Ion & SQL-92 - "numeric" to 0..2, // SQL-92 - "timestamp" to 0..0, // Ion & SQL-92 - "date" to 0..0, // PartiQL & SQL-92 - "time" to 0..1, // PartiQL & SQL-92 - "character" to 0..1, // SQL-92 + "missing" to 0..0, // PartiQL + "null" to 0..0, // Ion + "boolean" to 0..0, // Ion & SQL-99 + "smallint" to 0..0, // SQL-92 + "integer4" to 0..0, // PartiQL + "integer8" to 0..0, // PartiQL + "integer" to 0..0, // Ion & SQL-92 + "float" to 0..1, // Ion & SQL-92 + "real" to 0..0, // SQL-92 + "double_precision" to 0..0, // SQL-92 + "decimal" to 0..2, // Ion & SQL-92 + "numeric" to 0..2, // SQL-92 + "timestamp" to 0..0, // Ion & SQL-92 + "date" to 0..0, // PartiQL & SQL-92 + "time" to 0..1, // PartiQL & SQL-92 + "character" to 0..1, // SQL-92 "character_varying" to 0..1, // SQL-92 - "string" to 0..0, // Ion - "symbol" to 0..0, // Ion - "clob" to 0..0, // Ion - "blob" to 0..0, // Ion - "struct" to 0..0, // Ion - "tuple" to 0..0, // PartiQL - "list" to 0..0, // Ion - "sexp" to 0..0, // Ion - "bag" to 0..0 // PartiQL - // TODO SQL-92 types BIT, BIT VARYING, DATE, TIME, INTERVAL and TIMEZONE qualifier + "string" to 0..0, // Ion + "symbol" to 0..0, // Ion + "clob" to 0..0, // Ion + "blob" to 0..0, // Ion + "struct" to 0..0, // Ion + "tuple" to 0..0, // PartiQL + "list" to 0..0, // Ion + "sexp" to 0..0, // Ion + "bag" to 0..0 // PartiQL + // TODO SQL-92 types BIT, BIT VARYING, DATE, TIME, INTERVAL and TIMEZONE qualifier ) /** Indicates the keywords that indicate special union types. */ @JvmField internal val UNION_TYPE_NAME_ARITY_MAP = mapOf( - "any" to 0..0, + "any" to 0..0, /* ElasticSearch Data Types */ - "es_any" to 0..0 + "es_any" to 0..0 ) /** All type names and their arity. 
*/ @@ -405,7 +405,7 @@ internal val DATE_TIME_PART_KEYWORDS: Set = DateTimePart.values() "sum" ) -@JvmField internal val BASE_DML_KEYWORDS = setOf("insert_into", "set", "remove") +@JvmField internal val BASE_DML_KEYWORDS = setOf("insert_into", "set", "remove") /** * These reserved keywords cannot be used as identifiers for items in `select list`. @@ -423,7 +423,7 @@ internal val DATE_TIME_PART_KEYWORDS: Set = DateTimePart.values() /** Operators that parse as infix, but have special parsing rules. */ @JvmField internal val SPECIAL_INFIX_OPERATORS = setOf( "between", "not_between", - "like", "not_like" // optionally a ternary operator when `ESCAPE` is present + "like", "not_like" // optionally a ternary operator when `ESCAPE` is present ) /** Binary operators with verbatim lexical token equivalents. */ @@ -438,42 +438,48 @@ internal val DATE_TIME_PART_KEYWORDS: Set = DateTimePart.values() /** Tokens comprising multiple lexemes (**happens before** keyword aliasing). */ @JvmField internal val MULTI_LEXEME_TOKEN_MAP = mapOf( - listOf("not", "in") to ("not_in" to OPERATOR), - listOf("is", "not") to ("is_not" to OPERATOR), - listOf("not", "between") to ("not_between" to OPERATOR), - listOf("intersect", "all") to ("intersect_all" to OPERATOR), - listOf("except", "all") to ("except_all" to OPERATOR), - listOf("union", "all") to ("union_all" to OPERATOR), - listOf("character", "varying") to ("character_varying" to KEYWORD), - listOf("double", "precision") to ("double_precision" to KEYWORD), - listOf("not", "like") to ("not_like" to OPERATOR), - listOf("cross", "join") to ("cross_join" to KEYWORD), - listOf("inner", "join") to ("inner_join" to KEYWORD), - listOf("inner", "cross", "join") to ("cross_join" to KEYWORD), - listOf("left", "join") to ("left_join" to KEYWORD), - listOf("left", "outer", "join") to ("left_join" to KEYWORD), - listOf("left", "cross", "join") to ("left_cross_join" to KEYWORD), - listOf("left", "outer", - "cross", "join") to ("left_cross_join" to KEYWORD), - listOf("right", "join") to ("right_join" to KEYWORD), - listOf("right", "outer", "join") to ("right_join" to KEYWORD), - listOf("right", "cross", "join") to ("right_cross_join" to KEYWORD), - listOf("right", "outer", - "cross", "join") to ("right_cross_join" to KEYWORD), - listOf("full", "join") to ("outer_join" to KEYWORD), - listOf("outer", "join") to ("outer_join" to KEYWORD), - listOf("full", "outer", "join") to ("outer_join" to KEYWORD), - listOf("full", "cross", "join") to ("outer_cross_join" to KEYWORD), - listOf("outer", "cross", "join") to ("outer_cross_join" to KEYWORD), - listOf("full", "outer", - "cross", "join") to ("outer_cross_join" to KEYWORD), - listOf("insert", "into") to ("insert_into" to KEYWORD), - listOf("on", "conflict") to ("on_conflict" to KEYWORD), - listOf("do", "nothing") to ("do_nothing" to KEYWORD), - listOf("modified", "old") to ("modified_old" to KEYWORD), - listOf("modified", "new") to ("modified_new" to KEYWORD), - listOf("all", "old") to ("all_old" to KEYWORD), - listOf("all", "new") to ("all_new" to KEYWORD) + listOf("not", "in") to ("not_in" to OPERATOR), + listOf("is", "not") to ("is_not" to OPERATOR), + listOf("not", "between") to ("not_between" to OPERATOR), + listOf("intersect", "all") to ("intersect_all" to OPERATOR), + listOf("except", "all") to ("except_all" to OPERATOR), + listOf("union", "all") to ("union_all" to OPERATOR), + listOf("character", "varying") to ("character_varying" to KEYWORD), + listOf("double", "precision") to ("double_precision" to KEYWORD), + listOf("not", 
"like") to ("not_like" to OPERATOR), + listOf("cross", "join") to ("cross_join" to KEYWORD), + listOf("inner", "join") to ("inner_join" to KEYWORD), + listOf("inner", "cross", "join") to ("cross_join" to KEYWORD), + listOf("left", "join") to ("left_join" to KEYWORD), + listOf("left", "outer", "join") to ("left_join" to KEYWORD), + listOf("left", "cross", "join") to ("left_cross_join" to KEYWORD), + listOf( + "left", "outer", + "cross", "join" + ) to ("left_cross_join" to KEYWORD), + listOf("right", "join") to ("right_join" to KEYWORD), + listOf("right", "outer", "join") to ("right_join" to KEYWORD), + listOf("right", "cross", "join") to ("right_cross_join" to KEYWORD), + listOf( + "right", "outer", + "cross", "join" + ) to ("right_cross_join" to KEYWORD), + listOf("full", "join") to ("outer_join" to KEYWORD), + listOf("outer", "join") to ("outer_join" to KEYWORD), + listOf("full", "outer", "join") to ("outer_join" to KEYWORD), + listOf("full", "cross", "join") to ("outer_cross_join" to KEYWORD), + listOf("outer", "cross", "join") to ("outer_cross_join" to KEYWORD), + listOf( + "full", "outer", + "cross", "join" + ) to ("outer_cross_join" to KEYWORD), + listOf("insert", "into") to ("insert_into" to KEYWORD), + listOf("on", "conflict") to ("on_conflict" to KEYWORD), + listOf("do", "nothing") to ("do_nothing" to KEYWORD), + listOf("modified", "old") to ("modified_old" to KEYWORD), + listOf("modified", "new") to ("modified_new" to KEYWORD), + listOf("all", "old") to ("all_old" to KEYWORD), + listOf("all", "new") to ("all_new" to KEYWORD) ) @JvmField internal val MULTI_LEXEME_MIN_LENGTH = MULTI_LEXEME_TOKEN_MAP.keys.map { it.size }.min()!! @@ -524,45 +530,45 @@ enum class OperatorPrecedenceGroups(val precedence: Int) { */ @JvmField internal val OPERATOR_PRECEDENCE = mapOf( // set operator group - "intersect" to OperatorPrecedenceGroups.SET.precedence, + "intersect" to OperatorPrecedenceGroups.SET.precedence, "intersect_all" to OperatorPrecedenceGroups.SET.precedence, - "except" to OperatorPrecedenceGroups.SET.precedence, - "except_all" to OperatorPrecedenceGroups.SET.precedence, - "union" to OperatorPrecedenceGroups.SET.precedence, - "union_all" to OperatorPrecedenceGroups.SET.precedence, + "except" to OperatorPrecedenceGroups.SET.precedence, + "except_all" to OperatorPrecedenceGroups.SET.precedence, + "union" to OperatorPrecedenceGroups.SET.precedence, + "union_all" to OperatorPrecedenceGroups.SET.precedence, // logical group - "or" to OperatorPrecedenceGroups.LOGICAL_OR.precedence, - "and" to OperatorPrecedenceGroups.LOGICAL_AND.precedence, - "not" to OperatorPrecedenceGroups.LOGICAL_NOT.precedence, + "or" to OperatorPrecedenceGroups.LOGICAL_OR.precedence, + "and" to OperatorPrecedenceGroups.LOGICAL_AND.precedence, + "not" to OperatorPrecedenceGroups.LOGICAL_NOT.precedence, // equality group (TODO add other morphemes of equality/non-equality) - "=" to OperatorPrecedenceGroups.EQUITY.precedence, - "<>" to OperatorPrecedenceGroups.EQUITY.precedence, - "is" to OperatorPrecedenceGroups.EQUITY.precedence, - "is_not" to OperatorPrecedenceGroups.EQUITY.precedence, - "in" to OperatorPrecedenceGroups.EQUITY.precedence, - "not_in" to OperatorPrecedenceGroups.EQUITY.precedence, + "=" to OperatorPrecedenceGroups.EQUITY.precedence, + "<>" to OperatorPrecedenceGroups.EQUITY.precedence, + "is" to OperatorPrecedenceGroups.EQUITY.precedence, + "is_not" to OperatorPrecedenceGroups.EQUITY.precedence, + "in" to OperatorPrecedenceGroups.EQUITY.precedence, + "not_in" to OperatorPrecedenceGroups.EQUITY.precedence, 
// comparison group - "<" to OperatorPrecedenceGroups.COMPARISON.precedence, - "<=" to OperatorPrecedenceGroups.COMPARISON.precedence, - ">" to OperatorPrecedenceGroups.COMPARISON.precedence, - ">=" to OperatorPrecedenceGroups.COMPARISON.precedence, - "between" to OperatorPrecedenceGroups.COMPARISON.precedence, // note that this **must** be above 'AND' - "not_between" to OperatorPrecedenceGroups.COMPARISON.precedence, // note that this **must** be above 'AND' - "like" to OperatorPrecedenceGroups.COMPARISON.precedence, - "not_like" to OperatorPrecedenceGroups.COMPARISON.precedence, + "<" to OperatorPrecedenceGroups.COMPARISON.precedence, + "<=" to OperatorPrecedenceGroups.COMPARISON.precedence, + ">" to OperatorPrecedenceGroups.COMPARISON.precedence, + ">=" to OperatorPrecedenceGroups.COMPARISON.precedence, + "between" to OperatorPrecedenceGroups.COMPARISON.precedence, // note that this **must** be above 'AND' + "not_between" to OperatorPrecedenceGroups.COMPARISON.precedence, // note that this **must** be above 'AND' + "like" to OperatorPrecedenceGroups.COMPARISON.precedence, + "not_like" to OperatorPrecedenceGroups.COMPARISON.precedence, // the addition group - "+" to OperatorPrecedenceGroups.ADDITION.precedence, - "-" to OperatorPrecedenceGroups.ADDITION.precedence, - "||" to OperatorPrecedenceGroups.ADDITION.precedence, + "+" to OperatorPrecedenceGroups.ADDITION.precedence, + "-" to OperatorPrecedenceGroups.ADDITION.precedence, + "||" to OperatorPrecedenceGroups.ADDITION.precedence, // multiply group (TODO add exponentiation) - "*" to OperatorPrecedenceGroups.MULTIPLY.precedence, - "/" to OperatorPrecedenceGroups.MULTIPLY.precedence, - "%" to OperatorPrecedenceGroups.MULTIPLY.precedence + "*" to OperatorPrecedenceGroups.MULTIPLY.precedence, + "/" to OperatorPrecedenceGroups.MULTIPLY.precedence, + "%" to OperatorPrecedenceGroups.MULTIPLY.precedence ) // @@ -572,24 +578,24 @@ enum class OperatorPrecedenceGroups(val precedence: Int) { private fun allCase(chars: String) = chars.toLowerCase() + chars.toUpperCase() -const internal val SIGN_CHARS = "+-" +internal const val SIGN_CHARS = "+-" -const internal val NON_ZERO_DIGIT_CHARS = "123456789" -const internal val DIGIT_CHARS = "0" + NON_ZERO_DIGIT_CHARS +internal const val NON_ZERO_DIGIT_CHARS = "123456789" +internal const val DIGIT_CHARS = "0" + NON_ZERO_DIGIT_CHARS @JvmField internal val E_NOTATION_CHARS = allCase("E") -const internal val NON_OVERLOADED_OPERATOR_CHARS = "^%=@+" -const internal val OPERATOR_CHARS = NON_OVERLOADED_OPERATOR_CHARS + "-*/<>|!" +internal const val NON_OVERLOADED_OPERATOR_CHARS = "^%=@+" +internal const val OPERATOR_CHARS = NON_OVERLOADED_OPERATOR_CHARS + "-*/<>|!" 
@JvmField internal val ALPHA_CHARS = allCase("ABCDEFGHIJKLMNOPQRSTUVWXYZ") @JvmField internal val IDENT_START_CHARS = "_\$" + ALPHA_CHARS @JvmField internal val IDENT_CONTINUE_CHARS = IDENT_START_CHARS + DIGIT_CHARS -const internal val NL_WHITESPACE_CHARS = "\u000D\u000A" // CR, LF -const internal val NON_NL_WHITESPACE_CHARS = "\u0009\u000B\u000C\u0020" // TAB, VT, FF, SPACE -const internal val ALL_WHITESPACE_CHARS = NL_WHITESPACE_CHARS + NON_NL_WHITESPACE_CHARS +internal const val NL_WHITESPACE_CHARS = "\u000D\u000A" // CR, LF +internal const val NON_NL_WHITESPACE_CHARS = "\u0009\u000B\u000C\u0020" // TAB, VT, FF, SPACE +internal const val ALL_WHITESPACE_CHARS = NL_WHITESPACE_CHARS + NON_NL_WHITESPACE_CHARS -const internal val DOUBLE_QUOTE_CHARS = "\"" -const internal val SINGLE_QUOTE_CHARS = "'" -const internal val BACKTICK_CHARS = "`" +internal const val DOUBLE_QUOTE_CHARS = "\"" +internal const val SINGLE_QUOTE_CHARS = "'" +internal const val BACKTICK_CHARS = "`" diff --git a/lang/src/org/partiql/lang/syntax/SourcePosition.kt b/lang/src/org/partiql/lang/syntax/SourcePosition.kt index 8e476952c1..9a5f0b9740 100644 --- a/lang/src/org/partiql/lang/syntax/SourcePosition.kt +++ b/lang/src/org/partiql/lang/syntax/SourcePosition.kt @@ -18,4 +18,3 @@ package org.partiql.lang.syntax data class SourcePosition(val line: Long, val column: Long) { override fun toString(): String = "line $line, column $column" } - diff --git a/lang/src/org/partiql/lang/syntax/SqlLexer.kt b/lang/src/org/partiql/lang/syntax/SqlLexer.kt index 924ba86180..71881932f4 100644 --- a/lang/src/org/partiql/lang/syntax/SqlLexer.kt +++ b/lang/src/org/partiql/lang/syntax/SqlLexer.kt @@ -29,8 +29,10 @@ import java.math.BigInteger */ class SqlLexer(private val ion: IonSystem) : Lexer { /** Transition types. */ - internal enum class StateType(val beginsToken: Boolean = false, - val endsToken: Boolean = false) { + internal enum class StateType( + val beginsToken: Boolean = false, + val endsToken: Boolean = false + ) { /** Indicates the initial state for recognition. */ INITIAL(), /** Indicates an error state. */ @@ -85,12 +87,14 @@ class SqlLexer(private val ion: IonSystem) : Lexer { } /** State node and corresponding state table. */ - internal class TableState(override val stateType: StateType, - override val tokenType: TokenType? = null, - override val lexType: LexType = LexType.NONE, - override val replacement: Int = REPLACE_SAME, - var delegate: State = ERROR_STATE, - setup: TableState.() -> Unit = { }) : State { + internal class TableState( + override val stateType: StateType, + override val tokenType: TokenType? = null, + override val lexType: LexType = LexType.NONE, + override val replacement: Int = REPLACE_SAME, + var delegate: State = ERROR_STATE, + setup: TableState.() -> Unit = { } + ) : State { /** Default table with null states. */ val table = Array(TABLE_SIZE) { null } @@ -105,7 +109,8 @@ class SqlLexer(private val ion: IonSystem) : Lexer { this[cp] = when (old) { null -> new else -> throw IllegalStateException( - "Cannot replace existing state $old with $new") + "Cannot replace existing state $old with $new" + ) } } } @@ -117,9 +122,11 @@ class SqlLexer(private val ion: IonSystem) : Lexer { override fun get(next: Int): State = getFromTable(next) ?: delegate[next] - fun selfRepeatingDelegate(stateType: StateType, - tokenType: TokenType? = null, - lexType: LexType = LexType.NONE) { + fun selfRepeatingDelegate( + stateType: StateType, + tokenType: TokenType? 
= null, + lexType: LexType = LexType.NONE + ) { delegate = object : State { override val stateType = stateType override val tokenType = tokenType @@ -128,13 +135,15 @@ class SqlLexer(private val ion: IonSystem) : Lexer { } } - fun delta(chars: String, - stateType: StateType, - tokenType: TokenType? = null, - lexType: LexType = LexType.NONE, - replacement: Int = REPLACE_SAME, - delegate: State = this, - setup: TableState.(String) -> Unit = { }): TableState { + fun delta( + chars: String, + stateType: StateType, + tokenType: TokenType? = null, + lexType: LexType = LexType.NONE, + replacement: Int = REPLACE_SAME, + delegate: State = this, + setup: TableState.(String) -> Unit = { } + ): TableState { val child = TableState(stateType, tokenType, lexType, replacement, delegate) { setup(chars) } @@ -240,20 +249,20 @@ class SqlLexer(private val ion: IonSystem) : Lexer { delta(IDENT_CONTINUE_CHARS, StateType.TERMINAL, TokenType.IDENTIFIER) } - fun TableState.deltaDecimalInteger(stateType: StateType, lexType: LexType, setup: TableState.(String) -> Unit = { }): Unit { + fun TableState.deltaDecimalInteger(stateType: StateType, lexType: LexType, setup: TableState.(String) -> Unit = { }) { delta(DIGIT_CHARS, stateType, TokenType.LITERAL, lexType, delegate = initialState) { delta(DIGIT_CHARS, StateType.TERMINAL, TokenType.LITERAL, lexType) setup(it) } } - fun TableState.deltaDecimalFraction(setup: TableState.(String) -> Unit = { }): Unit { + fun TableState.deltaDecimalFraction(setup: TableState.(String) -> Unit = { }) { delta(".", StateType.TERMINAL, TokenType.LITERAL, LexType.DECIMAL) { deltaDecimalInteger(StateType.TERMINAL, LexType.DECIMAL, setup) } } - fun TableState.deltaExponent(setup: TableState.(String) -> Unit = { }): Unit { + fun TableState.deltaExponent(setup: TableState.(String) -> Unit = { }) { delta(E_NOTATION_CHARS, StateType.INCOMPLETE, delegate = ERROR_STATE) { delta(SIGN_CHARS, StateType.INCOMPLETE, delegate = ERROR_STATE) { deltaDecimalInteger(StateType.TERMINAL, LexType.DECIMAL, setup) @@ -288,7 +297,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { deltaNumber(StateType.START_AND_TERMINAL) - fun TableState.deltaQuote(quoteChar: String, tokenType: TokenType, lexType: LexType): Unit { + fun TableState.deltaQuote(quoteChar: String, tokenType: TokenType, lexType: LexType) { delta(quoteChar, StateType.START, replacement = REPLACE_NOTHING) { selfRepeatingDelegate(StateType.INCOMPLETE) val quoteState = this @@ -314,7 +323,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionCommentState) delta(NL_WHITESPACE_CHARS, StateType.INCOMPLETE, delegate = quoteState) } - delta("*", StateType.INCOMPLETE) { + delta("*", StateType.INCOMPLETE) { val ionCommentState = this selfRepeatingDelegate(StateType.INCOMPLETE) delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionCommentState) @@ -407,16 +416,14 @@ class SqlLexer(private val ion: IonSystem) : Lexer { */ private fun makePropertyBag(tokenString: String, tracker: PositionTracker): PropertyValueMap { val pvmap = PropertyValueMap() - pvmap[Property.LINE_NUMBER] = tracker.line - pvmap[Property.COLUMN_NUMBER] = tracker.col - pvmap[Property.TOKEN_STRING] = tokenString + pvmap[Property.LINE_NUMBER] = tracker.line + pvmap[Property.COLUMN_NUMBER] = tracker.col + pvmap[Property.TOKEN_STRING] = tokenString return pvmap } - override fun tokenize(source: String): List { - val codePoints = source.codePointSequence() + EOF val tokens = ArrayList() @@ -427,9 +434,8 @@ class SqlLexer(private 
val ion: IonSystem) : Lexer { var curr: State = INITIAL_STATE val buffer = StringBuilder() - for (cp in codePoints) { - tokenCodePointCount++; + tokenCodePointCount++ fun errInvalidChar(): Nothing = throw LexerException(errorCode = ErrorCode.LEXER_INVALID_CHAR, errorContext = makePropertyBag(repr(cp), tracker)) @@ -441,15 +447,17 @@ class SqlLexer(private val ion: IonSystem) : Lexer { throw LexerException(errorCode = ErrorCode.LEXER_INVALID_LITERAL, errorContext = makePropertyBag(literal, tracker)) fun errInvalidIonLiteral(literal: String, cause: IonException): Nothing = - throw LexerException(errorCode = ErrorCode.LEXER_INVALID_ION_LITERAL, - errorContext = makePropertyBag(literal, tracker), - cause = cause) + throw LexerException( + errorCode = ErrorCode.LEXER_INVALID_ION_LITERAL, + errorContext = makePropertyBag(literal, tracker), + cause = cause + ) tracker.advance(cp) // retrieve the next state val next = when (cp) { - EOF -> EOF_STATE + EOF -> EOF_STATE else -> curr[cp] } @@ -537,24 +545,22 @@ class SqlLexer(private val ion: IonSystem) : Lexer { } } TokenType.LITERAL -> when (curr.lexType) { - LexType.SQ_STRING -> ion.newString(text) - LexType.INTEGER -> ion.newInt(BigInteger(text, 10)) - LexType.DECIMAL -> try { + LexType.SQ_STRING -> ion.newString(text) + LexType.INTEGER -> ion.newInt(BigInteger(text, 10)) + LexType.DECIMAL -> try { ion.newDecimal(bigDecimalOf(text)) - } - catch (e: NumberFormatException) { + } catch (e: NumberFormatException) { errInvalidLiteral(text) } - else -> errInvalidLiteral(text) + else -> errInvalidLiteral(text) } TokenType.ION_LITERAL -> { try { // anything wrapped by `` is considered as an ion literal, including invalid // ion so we need to handle the exception here for proper error reporting ion.singleValue(text) - } - catch (e: IonException) { + } catch (e: IonException) { errInvalidIonLiteral(text, e) } } @@ -568,7 +574,9 @@ class SqlLexer(private val ion: IonSystem) : Lexer { Token( type = tokenType, value = ionValue, - span = SourceSpan(currPos.line, currPos.column, tokenCodePointCount))) + span = SourceSpan(currPos.line, currPos.column, tokenCodePointCount) + ) + ) } // get ready for next token @@ -579,10 +587,12 @@ class SqlLexer(private val ion: IonSystem) : Lexer { } val replacement = next.replacement if (cp != EOF && replacement != REPLACE_NOTHING) { - buffer.appendCodePoint(when (replacement) { - REPLACE_SAME -> cp - else -> replacement - }) + buffer.appendCodePoint( + when (replacement) { + REPLACE_SAME -> cp + else -> replacement + } + ) } // if next state is the EOF marker add it to `tokens`. 
@@ -590,7 +600,9 @@ class SqlLexer(private val ion: IonSystem) : Lexer { Token( type = TokenType.EOF, value = ion.newSymbol("EOF"), - span = SourceSpan(currPos.line, currPos.column, 0))) + span = SourceSpan(currPos.line, currPos.column, 0) + ) + ) curr = next } diff --git a/lang/src/org/partiql/lang/syntax/SqlParser.kt b/lang/src/org/partiql/lang/syntax/SqlParser.kt index f72ce686bf..4db4e75dab 100644 --- a/lang/src/org/partiql/lang/syntax/SqlParser.kt +++ b/lang/src/org/partiql/lang/syntax/SqlParser.kt @@ -23,7 +23,6 @@ import com.amazon.ionelement.api.ionInt import com.amazon.ionelement.api.ionString import com.amazon.ionelement.api.metaContainerOf import com.amazon.ionelement.api.toIonElement -import org.partiql.lang.domains.metaContainerOf import org.partiql.lang.ast.AstSerializer import org.partiql.lang.ast.AstVersion import org.partiql.lang.ast.ExprNode @@ -37,6 +36,7 @@ import org.partiql.lang.ast.SourceLocationMeta import org.partiql.lang.ast.SqlDataType import org.partiql.lang.ast.toExprNode import org.partiql.lang.domains.PartiqlAst +import org.partiql.lang.domains.metaContainerOf import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property import org.partiql.lang.errors.PropertyValueMap @@ -73,14 +73,14 @@ import java.time.format.DateTimeFormatter.ISO_TIME import java.time.format.DateTimeParseException import java.time.temporal.Temporal - /** * Parses a list of tokens as infix query expression into a prefix s-expression * as the abstract syntax tree. */ class SqlParser( private val ion: IonSystem, - customTypes: List = listOf()) : Parser { + customTypes: List = listOf() +) : Parser { private val CUSTOM_KEYWORDS = customTypes.map { it.name.toLowerCase() } @@ -117,7 +117,7 @@ class SqlParser( ATOM, CASE_SENSITIVE_ATOM, CASE_INSENSITIVE_ATOM, - PROJECT_ALL, // Wildcard, i.e. the * in `SELECT * FROM f` and a.b.c.* in `SELECT a.b.c.* FROM f` + PROJECT_ALL, // Wildcard, i.e. the * in `SELECT * FROM f` and a.b.c.* in `SELECT a.b.c.* FROM f` PATH_WILDCARD, PATH_UNPIVOT, LET, @@ -196,10 +196,12 @@ class SqlParser( val identifier = name.toLowerCase() } - internal data class ParseNode(val type: ParseType, - val token: Token?, - val children: List, - val remaining: List) { + internal data class ParseNode( + val type: ParseType, + val token: Token?, + val children: List, + val remaining: List + ) { /** Derives a [ParseNode] transforming the list of remaining tokens. 
*/ private fun derive(tokensHandler: List.() -> List): ParseNode = @@ -261,9 +263,9 @@ class SqlParser( } } - //*************************************** + // *************************************** // toAstStatement - //*************************************** + // *************************************** private fun ParseNode.toAstStatement(): PartiqlAst.Statement { return when (type) { ParseType.ATOM, ParseType.LIST, ParseType.BAG, ParseType.STRUCT, ParseType.UNARY, ParseType.BINARY, @@ -289,7 +291,7 @@ class SqlParser( return PartiqlAst.build { when (type) { - ParseType.ATOM -> when (token?.type){ + ParseType.ATOM -> when (token?.type) { TokenType.LITERAL, TokenType.NULL, TokenType.TRIM_SPECIFICATION, TokenType.DATETIME_PART -> lit(token.value!!.toIonElement(), metas) TokenType.ION_LITERAL -> lit(token.value!!.toIonElement(), metas + metaContainerOf(IsIonLiteralMeta.instance)) TokenType.MISSING -> missing(metas) @@ -329,7 +331,7 @@ class SqlParser( } when (opName) { - "@" -> { + "@" -> { val childNode = children[0] val childToken = childNode.token ?: errMalformedParseTree("@ node does not have a token") @@ -357,17 +359,19 @@ class SqlParser( nullIf( children[0].toAstExpr(), children[1].toAstExpr(), - metas) + metas + ) } ParseType.COALESCE -> { coalesce( children.map { it.toAstExpr() }, - metas) + metas + ) } ParseType.TYPE_FUNCTION -> { val funcExpr = children[0].toAstExpr() val dataType = children[1].toAstType() - when(token?.keywordText) { + when (token?.keywordText) { "cast" -> cast(funcExpr, dataType, metas) "can_cast" -> canCast(funcExpr, dataType, metas) "can_lossless_cast" -> canLosslessCast(funcExpr, dataType, metas) @@ -377,9 +381,9 @@ class SqlParser( ParseType.CALL -> { when (val funcName = token?.text!!.toLowerCase()) { // special case--list/sexp/bag "functions" are intrinsic to the literal form - "sexp" -> sexp(children.map { it.toAstExpr()}, metas) - "list" -> list(children.map { it.toAstExpr()}, metas) - "bag" -> bag(children.map { it.toAstExpr()}, metas) + "sexp" -> sexp(children.map { it.toAstExpr() }, metas) + "list" -> list(children.map { it.toAstExpr() }, metas) + "bag" -> bag(children.map { it.toAstExpr() }, metas) else -> { // Note: we are forcing all function name lookups to be case-insensitive here... // This seems like the right thing to do because that is consistent with the @@ -397,7 +401,7 @@ class SqlParser( callAgg_(distinct(), funcName, children[0].toAstExpr(), metas) } ParseType.CALL_AGG_WILDCARD -> { - if(token!!.type != TokenType.KEYWORD || token.keywordText != "count") { + if (token!!.type != TokenType.KEYWORD || token.keywordText != "count") { errMalformedParseTree("only COUNT can be used with a wildcard") } // Should only get the [SourceLocationMeta] if present, not any other metas. 
@@ -410,9 +414,9 @@ class SqlParser( ParseType.PATH -> { val rootExpr = children[0].toAstExpr() val pathComponents = children.drop(1).map { - when(it.type) { + when (it.type) { ParseType.PATH_DOT -> { - if(it.children.size != 1) { + if (it.children.size != 1) { errMalformedParseTree("Unexpected number of child elements in PATH_DOT ParseNode") } val atomParseNode = it.children[0] @@ -420,7 +424,7 @@ class SqlParser( when (atomParseNode.type) { ParseType.CASE_SENSITIVE_ATOM, ParseType.CASE_INSENSITIVE_ATOM -> { val lit = lit(ionString(atomParseNode.token?.text!!), atomMetas) - val caseSensitivity = if (atomParseNode.type == ParseType.CASE_SENSITIVE_ATOM) caseSensitive() else caseInsensitive() + val caseSensitivity = if (atomParseNode.type == ParseType.CASE_SENSITIVE_ATOM) caseSensitive() else caseInsensitive() pathExpr(lit, caseSensitivity, atomMetas) } ParseType.PATH_UNPIVOT -> pathUnpivot(atomMetas) @@ -428,7 +432,7 @@ class SqlParser( } } ParseType.PATH_SQB -> { - if(it.children.size != 1) { + if (it.children.size != 1) { errMalformedParseTree("Unexpected number of child elements in PATH_SQB ParseNode") } val child = it.children[0] @@ -447,7 +451,7 @@ class SqlParser( var elseExpr: PartiqlAst.Expr? = null fun ParseNode.addCases() = children.forEach { - when(it.type) { + when (it.type) { ParseType.WHEN -> branches.add(exprPair(it.children[0].toAstExpr(), it.children[1].toAstExpr())) ParseType.ELSE -> elseExpr = it.children[0].toAstExpr() @@ -541,8 +545,10 @@ class SqlParser( it.children[0].toAstExpr() } - val groupBy = unconsumedChildren.firstOrNull { it.type == ParseType.GROUP || - it.type == ParseType.GROUP_PARTIAL }?.let { + val groupBy = unconsumedChildren.firstOrNull { + it.type == ParseType.GROUP || + it.type == ParseType.GROUP_PARTIAL + }?.let { unconsumedChildren.remove(it) val groupingStrategy = when (it.type) { ParseType.GROUP -> groupFull() @@ -615,8 +621,10 @@ class SqlParser( val precision = children[0].token!!.value!!.numberValue().toLong() val time = LocalTime.parse(timeString, ISO_TIME) litTime( - timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), time.nano.toLong(), - precision, false, null), + timeValue( + time.hour.toLong(), time.minute.toLong(), time.second.toLong(), time.nano.toLong(), + precision, false, null + ), metas ) } @@ -626,16 +634,20 @@ class SqlParser( try { val time = OffsetTime.parse(timeString) litTime( - timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), - time.nano.toLong(), precision, true, (time.offset.totalSeconds/60).toLong()), + timeValue( + time.hour.toLong(), time.minute.toLong(), time.second.toLong(), + time.nano.toLong(), precision, true, (time.offset.totalSeconds / 60).toLong() + ), metas ) } catch (e: DateTimeParseException) { // In case time zone not explicitly specified val time = LocalTime.parse(timeString) litTime( - timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), - time.nano.toLong(), precision, true, null), + timeValue( + time.hour.toLong(), time.minute.toLong(), time.second.toLong(), + time.nano.toLong(), precision, true, null + ), metas ) } @@ -756,8 +768,8 @@ class SqlParser( } val typeName = token?.keywordText ?: token?.customKeywordText - val sqlDataType = SqlDataType.forTypeName(typeName!!) ?: - (token?.customType ?: errMalformedParseTree("Invalid DataType: $typeName")) + val sqlDataType = SqlDataType.forTypeName(typeName!!) 
+ ?: (token?.customType ?: errMalformedParseTree("Invalid DataType: $typeName")) val metas = getMetas() val args = children.map { val argValue = it.token?.value @@ -767,7 +779,7 @@ class SqlParser( errMalformedParseTree("Data type argument was not an Ion INT for some reason") } - when(argValue.integerSize!!) { + when (argValue.integerSize!!) { IntegerSize.INT -> argValue.intValue() IntegerSize.LONG, IntegerSize.BIG_INTEGER -> it.token.err( @@ -921,15 +933,15 @@ class SqlParser( return when (type) { ParseType.AS_ALIAS -> { - if(variables.asName != null) error("Invalid parse tree: AS_ALIAS encountered more than once in FROM source") + if (variables.asName != null) error("Invalid parse tree: AS_ALIAS encountered more than once in FROM source") children[0].unwrapAliases(variables.copy(asName = SymbolPrimitive(token!!.text!!, metas))) } ParseType.AT_ALIAS -> { - if(variables.atName != null) error("Invalid parse tree: AT_ALIAS encountered more than once in FROM source") + if (variables.atName != null) error("Invalid parse tree: AT_ALIAS encountered more than once in FROM source") children[0].unwrapAliases(variables.copy(atName = SymbolPrimitive(token!!.text!!, metas))) } ParseType.BY_ALIAS -> { - if(variables.byName != null) error("Invalid parse tree: BY_ALIAS encountered more than once in FROM source") + if (variables.byName != null) error("Invalid parse tree: BY_ALIAS encountered more than once in FROM source") children[0].unwrapAliases(variables.copy(byName = SymbolPrimitive(token!!.text!!, metas))) } else -> Pair(variables, this) @@ -949,7 +961,7 @@ class SqlParser( } private fun ParseNode.toReturningMapping(): PartiqlAst.ReturningMapping { - if(type != ParseType.RETURNING_MAPPING) { + if (type != ParseType.RETURNING_MAPPING) { errMalformedParseTree("Expected ParseType.RETURNING_MAPPING instead of $type") } return PartiqlAst.build { @@ -1001,8 +1013,10 @@ class SqlParser( val unconsumedChildren = children.drop(2).toMutableList() // Handle AT clause - val position = unconsumedChildren.firstOrNull { it.type != ParseType.ON_CONFLICT && - it.type != ParseType.RETURNING }?.let { + val position = unconsumedChildren.firstOrNull { + it.type != ParseType.ON_CONFLICT && + it.type != ParseType.RETURNING + }?.let { unconsumedChildren.remove(it) it.toAstExpr() } @@ -1048,8 +1062,10 @@ class SqlParser( val unconsumedChildren = children.drop(2).toMutableList() // Handle AT clause - val position = unconsumedChildren.firstOrNull { it.type != ParseType.ON_CONFLICT && - it.type != ParseType.RETURNING }?.let { + val position = unconsumedChildren.firstOrNull { + it.type != ParseType.ON_CONFLICT && + it.type != ParseType.RETURNING + }?.let { unconsumedChildren.remove(it) it.toAstExpr() } @@ -1096,14 +1112,14 @@ class SqlParser( } private fun ParseNode.toIdentifier(): PartiqlAst.Identifier { - if (type != ParseType.ATOM){ + if (type != ParseType.ATOM) { errMalformedParseTree("Cannot transform ParseNode type: $type to identifier") } val metas = getMetas() return PartiqlAst.build { - when (token?.type){ + when (token?.type) { TokenType.QUOTED_IDENTIFIER -> identifier(token.text!!, caseSensitive(), metas) TokenType.IDENTIFIER -> identifier(token.text!!, caseInsensitive(), metas) else -> errMalformedParseTree("Cannot transform atom token type ${token?.type} to identifier") @@ -1194,7 +1210,6 @@ class SqlParser( val node: ParseNode ) - /********************************************************************************************** * Parse logic below this line. 
**********************************************************************************************/ @@ -1232,7 +1247,7 @@ class SqlParser( fun parseRightExpr() = if (rem.size < 3) { rem.err( "Missing right-hand side expression of infix operator", - ErrorCode.PARSE_EXPECTED_EXPRESSION + ErrorCode.PARSE_EXPECTED_EXPRESSION ) } else { rem.tail.parseExpression( @@ -1245,8 +1260,8 @@ class SqlParser( "is", "is_not" -> rem.tail.parseType(op.keywordText!!) // IN has context sensitive parsing rules around parenthesis "in", "not_in" -> when { - rem.tail.head?.type == TokenType.LEFT_PAREN - && rem.tail.tail.head?.keywordText !in IN_OP_NORMAL_EVAL_KEYWORDS -> + rem.tail.head?.type == TokenType.LEFT_PAREN && + rem.tail.tail.head?.keywordText !in IN_OP_NORMAL_EVAL_KEYWORDS -> rem.tail.tail.parseArgList( aliasSupportType = AliasSupportType.NONE, mode = ArgListMode.NORMAL_ARG_LIST @@ -1329,7 +1344,6 @@ class SqlParser( } } - private fun List.parsePathTerm(pathMode: PathMode = PathMode.FULL_PATH): ParseNode { val term = when (pathMode) { PathMode.FULL_PATH -> parseTerm() @@ -1363,7 +1377,7 @@ class SqlParser( } ParseNode(ParseType.PATH_UNPIVOT, rem.head, emptyList(), rem.tail) } - else -> { + else -> { rem.err("Invalid path dot component", ErrorCode.PARSE_INVALID_PATH_COMPONENT) } } @@ -1545,7 +1559,7 @@ class SqlParser( return ParseNode(ParseType.TYPE_FUNCTION, head, listOf(valueExpr, typeNode), rem) } - private fun List.parseNullIf(nullIfToken: Token) : ParseNode { + private fun List.parseNullIf(nullIfToken: Token): ParseNode { if (head?.type != TokenType.LEFT_PAREN) { err("Missing left parenthesis after nullif", ErrorCode.PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR) } @@ -1558,7 +1572,7 @@ class SqlParser( return ParseNode(ParseType.NULLIF, nullIfToken, listOf(expr1, expr2), rem) } - private fun List.parseCoalesce() : ParseNode { + private fun List.parseCoalesce(): ParseNode { if (head?.type != TokenType.LEFT_PAREN) { err("Missing left parenthesis after coalesce", ErrorCode.PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR) } @@ -1569,9 +1583,11 @@ class SqlParser( private fun List.parseType(opName: String): ParseNode { val typeName = head?.keywordText - val typeArity = ALL_TYPE_NAME_ARITY_MAP[typeName] ?: - (head?.customType?.arityRange ?: - err("Expected type name", ErrorCode.PARSE_EXPECTED_TYPE_NAME)) + val typeArity = ALL_TYPE_NAME_ARITY_MAP[typeName] + ?: ( + head?.customType?.arityRange + ?: err("Expected type name", ErrorCode.PARSE_EXPECTED_TYPE_NAME) + ) val typeNode = when (tail.head?.type) { TokenType.LEFT_PAREN -> tail.tail.parseArgList( @@ -1595,7 +1611,7 @@ class SqlParser( ) { precision.token.err( "Expected integer value between 0 and 9 for precision", - ErrorCode.PARSE_INVALID_PRECISION_FOR_TIME + ErrorCode.PARSE_INVALID_PRECISION_FOR_TIME ) } } @@ -1606,23 +1622,23 @@ class SqlParser( it.token } it.copy(token = newToken, remaining = remainingAfterOptionalTimeZone) - } - else { + } else { it } } if (typeNode.children.size !in typeArity) { val pvmap = PropertyValueMap() - pvmap[Property.CAST_TO] = typeName?: "" + pvmap[Property.CAST_TO] = typeName ?: "" pvmap[Property.EXPECTED_ARITY_MIN] = typeArity.first pvmap[Property.EXPECTED_ARITY_MAX] = typeArity.last tail.err("$opName type argument $typeName must have arity of $typeArity", ErrorCode.PARSE_CAST_ARITY, pvmap) } for (child in typeNode.children) { - if (child.type != ParseType.ATOM - || child.token?.type != TokenType.LITERAL - || child.token.value?.isUnsignedInteger != true) { + if (child.type != ParseType.ATOM || + child.token?.type != 
TokenType.LITERAL || + child.token.value?.isUnsignedInteger != true + ) { err("Type parameter must be an unsigned integer literal", ErrorCode.PARSE_INVALID_TYPE_PARAM) } } @@ -1657,8 +1673,8 @@ class SqlParser( return ParseNode(ParseType.FROM, null, listOf(operation, fromList) + children, rem) } - private fun List.parseBaseDmls() : ParseNode { - var rem = this; + private fun List.parseBaseDmls(): ParseNode { + var rem = this val nodes = ArrayList() while (rem.head?.keywordText in BASE_DML_KEYWORDS) { var node = rem.parseBaseDml() @@ -1688,7 +1704,7 @@ class SqlParser( val value = rem.tail.parseExpression() rem = value.remaining - val position = when(rem.head?.keywordText) { + val position = when (rem.head?.keywordText) { "at" -> rem.tail.parseExpression().also { rem = it.remaining } else -> null } @@ -1844,11 +1860,15 @@ class SqlParser( var rem = this when (rem.head?.keywordText) { "modified_old", "modified_new", "all_old", "all_new" -> { - return ParseNode(type = ParseType.RETURNING_MAPPING, token = rem.head, children = listOf(), - remaining = rem.tail) + return ParseNode( + type = ParseType.RETURNING_MAPPING, token = rem.head, children = listOf(), + remaining = rem.tail + ) } - else -> rem.err("Expected ( MODIFIED | ALL ) ( NEW | OLD ) in each returning element.", - ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE) + else -> rem.err( + "Expected ( MODIFIED | ALL ) ( NEW | OLD ) in each returning element.", + ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE + ) } } @@ -1860,8 +1880,10 @@ class SqlParser( when (it.type) { ParseType.PATH -> inspectColumnPathExpression(it) ParseType.ATOM -> it - else -> this.err("Unsupported syntax in RETURNING columns.", - ErrorCode.PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX) + else -> this.err( + "Unsupported syntax in RETURNING columns.", + ErrorCode.PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX + ) } } expr @@ -1871,8 +1893,10 @@ class SqlParser( private fun inspectColumnPathExpression(pathNode: ParseNode): ParseNode { if (pathNode.children.size > 2) { - pathNode.children[2].token?.err("More than two paths in RETURNING columns.", - ErrorCode.PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX) + pathNode.children[2].token?.err( + "More than two paths in RETURNING columns.", + ErrorCode.PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX + ) } return pathNode } @@ -1883,8 +1907,10 @@ class SqlParser( rem = value.remaining val name = rem.parseExpression() rem = name.remaining - val selectAfterProjection = parseSelectAfterProjection(ParseType.PIVOT, - ParseNode(ParseType.MEMBER, null, listOf(name, value), rem)) + val selectAfterProjection = parseSelectAfterProjection( + ParseType.PIVOT, + ParseNode(ParseType.MEMBER, null, listOf(name, value), rem) + ) return selectAfterProjection } @@ -1916,11 +1942,13 @@ class SqlParser( } val asterisk = list.children.firstOrNull { it.type == ParseType.PROJECT_ALL && it.children.isEmpty() } - if(asterisk != null - && list.children.size > 1) { + if (asterisk != null && + list.children.size > 1 + ) { asterisk.token.err( "Other expressions may not be present in the select list when '*' is used without dot notation.", - ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST) + ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST + ) } list @@ -2049,7 +2077,7 @@ class SqlParser( } // TODO support full expressions here... 
only simple paths for now val keys = rem.tail.parseArgList(AliasSupportType.NONE, ArgListMode.SIMPLE_PATH_ARG_LIST) - .deriveExpected(TokenType.RIGHT_PAREN) + .deriveExpected(TokenType.RIGHT_PAREN) rem = keys.remaining // TODO support other syntax options @@ -2087,7 +2115,7 @@ class SqlParser( fun flattenParseNode(node: ParseNode): List { fun doFlatten(n: ParseNode, l: MutableList) { l.add(n) - n.children.forEach { doFlatten(it,l ) } + n.children.forEach { doFlatten(it, l) } } val list = mutableListOf() doFlatten(node, list) @@ -2096,23 +2124,23 @@ class SqlParser( val flattened = flattenParseNode(pathNode).drop(2) - //Is invalid if contains PATH_WILDCARD (i.e. to `[*]`} + // Is invalid if contains PATH_WILDCARD (i.e. to `[*]`} flattened.firstOrNull { it.type == ParseType.PATH_WILDCARD } ?.token ?.err("Invalid use of * in select list", ErrorCode.PARSE_INVALID_CONTEXT_FOR_WILDCARD_IN_SELECT_LIST) - //Is invalid if contains PATH_WILDCARD_UNPIVOT (i.e. * as part of a dotted expression) anywhere except at the end. - //i.e. f.*.b is invalid but f.b.* is not. + // Is invalid if contains PATH_WILDCARD_UNPIVOT (i.e. * as part of a dotted expression) anywhere except at the end. + // i.e. f.*.b is invalid but f.b.* is not. flattened.dropLast(1).firstOrNull { it.type == ParseType.PATH_UNPIVOT } ?.token ?.err("Invalid use of * in select list", ErrorCode.PARSE_INVALID_CONTEXT_FOR_WILDCARD_IN_SELECT_LIST) - //If the last path component expression is a *, then the PathType is a wildcard and we need to do one - //additional check. - if(flattened.last().type == ParseType.PATH_UNPIVOT) { + // If the last path component expression is a *, then the PathType is a wildcard and we need to do one + // additional check. + if (flattened.last().type == ParseType.PATH_UNPIVOT) { - //Is invalid if contains a square bracket anywhere and a wildcard at the end. - //i.e f[1].* is invalid + // Is invalid if contains a square bracket anywhere and a wildcard at the end. 
+ // i.e f[1].* is invalid flattened.firstOrNull { it.type == ParseType.PATH_SQB } ?.token ?.err("Cannot use [] and * together in SELECT list expression", ErrorCode.PARSE_CANNOT_MIX_SQB_AND_WILDCARD_IN_SELECT_LIST) @@ -2123,7 +2151,8 @@ class SqlParser( type = ParseType.PROJECT_ALL, token = null, children = listOf(if (pathPart.children.size == 1) pathPart.children[0] else pathPart), - remaining = pathNode.remaining) + remaining = pathNode.remaining + ) } return pathNode } @@ -2132,14 +2161,12 @@ class SqlParser( return parseCommaList { if (this.head?.type == TokenType.STAR) { ParseNode(ParseType.PROJECT_ALL, this.head, listOf(), this.tail) - } - else if (this.head != null && this.head?.keywordText in RESERVED_KEYWORDS) { + } else if (this.head != null && this.head?.keywordText in RESERVED_KEYWORDS) { this.head.err( - "Expected identifier or an expression but found unexpected keyword '${this.head?.keywordText?: ""}' in a select list.", - ErrorCode.PARSE_UNEXPECTED_KEYWORD + "Expected identifier or an expression but found unexpected keyword '${this.head?.keywordText ?: ""}' in a select list.", + ErrorCode.PARSE_UNEXPECTED_KEYWORD ) - } - else { + } else { val expr = parseExpression().let { when (it.type) { ParseType.PATH -> inspectPathExpression(it) @@ -2216,8 +2243,10 @@ class SqlParser( groupKey.children.forEach { // TODO support ordinal case if (it.token?.type == TokenType.LITERAL) { - it.token.err("Literals (including ordinals) not supported in GROUP BY", - ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY) + it.token.err( + "Literals (including ordinals) not supported in GROUP BY", + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY + ) } } groupChildren.add(groupKey) @@ -2227,8 +2256,10 @@ class SqlParser( rem = rem.tail.tailExpectedKeyword("as") if (rem.head?.type?.isIdentifier() != true) { - rem.err("Expected identifier for GROUP name", - ErrorCode.PARSE_EXPECTED_IDENT_FOR_GROUP_NAME) + rem.err( + "Expected identifier for GROUP name", + ErrorCode.PARSE_EXPECTED_IDENT_FOR_GROUP_NAME + ) } groupChildren.add(rem.atomFromHead()) rem = rem.tail @@ -2253,7 +2284,7 @@ class SqlParser( } private fun List.parseFunctionCall(name: Token): ParseNode { - fun parseCallArguments(callName: String, args: List, callType: ParseType): ParseNode = when(args.head?.type) { + fun parseCallArguments(callName: String, args: List, callType: ParseType): ParseNode = when (args.head?.type) { TokenType.STAR -> err("$callName(*) is not allowed", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR) TokenType.RIGHT_PAREN -> ParseNode(callType, name, emptyList(), tail) else -> { @@ -2280,7 +2311,7 @@ class SqlParser( } head?.type == TokenType.KEYWORD && keywordText == "distinct" -> { - when(memoizedTail.head?.type) { + when (memoizedTail.head?.type) { // COUNT(DISTINCT *) TokenType.STAR -> { err("COUNT(DISTINCT *) is not supported", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR) @@ -2296,7 +2327,7 @@ class SqlParser( } head?.type == TokenType.KEYWORD && keywordText == "all" -> { - when(memoizedTail.head?.type) { + when (memoizedTail.head?.type) { TokenType.STAR -> err("COUNT(ALL *) is not supported", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR) // COUNT(ALL expression) @@ -2328,8 +2359,10 @@ class SqlParser( } if (call.children.size != 1) { - err("Aggregate functions are always unary", - ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL) + err( + "Aggregate functions are always unary", + ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL + ) } call @@ -2358,9 +2391,7 @@ class SqlParser( // Stored procedure call has no args if (rem.head?.type 
== TokenType.EOF) { return ParseNode(ParseType.EXEC, procedureName, emptyList(), rem) - } - - else if (rem.head?.type == TokenType.LEFT_PAREN) { + } else if (rem.head?.type == TokenType.LEFT_PAREN) { rem.err("Unexpected ${TokenType.LEFT_PAREN} found following stored procedure call", ErrorCode.PARSE_UNEXPECTED_TOKEN) } @@ -2380,8 +2411,10 @@ class SqlParser( if (rem.head?.type != TokenType.LEFT_PAREN) { val pvmap = PropertyValueMap() pvmap[Property.EXPECTED_TOKEN_TYPE] = TokenType.LEFT_PAREN - rem.err("Expected ${TokenType.LEFT_PAREN}", - ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL, pvmap) + rem.err( + "Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL, pvmap + ) } var stringExpr = tail.parseExpression() @@ -2394,12 +2427,14 @@ class SqlParser( stringExpr.deriveExpectedKeyword("from") } rem.head!!.type == TokenType.COMMA -> stringExpr.deriveExpected(TokenType.COMMA) - else -> rem.err("Expected ${TokenType.KEYWORD} 'from' OR ${TokenType.COMMA}", - ErrorCode.PARSE_EXPECTED_ARGUMENT_DELIMITER) + else -> rem.err( + "Expected ${TokenType.KEYWORD} 'from' OR ${TokenType.COMMA}", + ErrorCode.PARSE_EXPECTED_ARGUMENT_DELIMITER + ) } val (positionExpr: ParseNode, expectedToken: Token) = stringExpr.remaining.parseExpression() - .deriveExpected(if(parseSql92Syntax) TokenType.FOR else TokenType.COMMA, TokenType.RIGHT_PAREN) + .deriveExpected(if (parseSql92Syntax) TokenType.FOR else TokenType.COMMA, TokenType.RIGHT_PAREN) if (expectedToken.type == TokenType.RIGHT_PAREN) { return ParseNode( @@ -2412,11 +2447,12 @@ class SqlParser( rem = positionExpr.remaining val lengthExpr = rem.parseExpression().deriveExpected(TokenType.RIGHT_PAREN) - return ParseNode(ParseType.CALL, + return ParseNode( + ParseType.CALL, name, listOf(stringExpr, positionExpr, lengthExpr), - lengthExpr.remaining) - + lengthExpr.remaining + ) } /** @@ -2425,8 +2461,10 @@ class SqlParser( * Syntax is TRIM([[ specification ] [to trim characters] FROM] ). 
*/ private fun List.parseTrim(name: Token): ParseNode { - if (head?.type != TokenType.LEFT_PAREN) err("Expected ${TokenType.LEFT_PAREN}", - ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) + if (head?.type != TokenType.LEFT_PAREN) err( + "Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL + ) var rem = tail val arguments = mutableListOf() @@ -2442,8 +2480,12 @@ class SqlParser( val hasSpecification = when { maybeTrimSpec?.type == TokenType.IDENTIFIER && TRIM_SPECIFICATION_KEYWORDS.contains(maybeTrimSpec.text?.toLowerCase()) -> { - arguments.add(ParseNode(ParseType.ATOM, maybeTrimSpec.copy(type = TokenType.TRIM_SPECIFICATION), - listOf(), rem.tail)) + arguments.add( + ParseNode( + ParseType.ATOM, maybeTrimSpec.copy(type = TokenType.TRIM_SPECIFICATION), + listOf(), rem.tail + ) + ) rem = rem.tail true @@ -2454,21 +2496,19 @@ class SqlParser( if (hasSpecification) { // trim(spec [toRemove] from target) rem = when (rem.head?.keywordText) { "from" -> rem.tail - else -> parseArgument { it.deriveExpectedKeyword("from") } + else -> parseArgument { it.deriveExpectedKeyword("from") } } rem = parseArgument() - } - else { - if(rem.head?.keywordText == "from") { // trim(from target) + } else { + if (rem.head?.keywordText == "from") { // trim(from target) rem = rem.tail // skips from rem = parseArgument() - } - else { // trim([toRemove from] target) + } else { // trim([toRemove from] target) rem = parseArgument() - if(rem.head?.keywordText == "from") { + if (rem.head?.keywordText == "from") { rem = rem.tail // skips from rem = parseArgument() @@ -2476,7 +2516,7 @@ class SqlParser( } } - if(rem.head?.type != TokenType.RIGHT_PAREN) { + if (rem.head?.type != TokenType.RIGHT_PAREN) { rem.err("Expected ${TokenType.RIGHT_PAREN}", ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL) } @@ -2485,7 +2525,7 @@ class SqlParser( private fun List.parseDateTimePart(): ParseNode { val maybeDateTimePart = this.head - return when { + return when { maybeDateTimePart?.type == TokenType.IDENTIFIER && DATE_TIME_PART_KEYWORDS.contains(maybeDateTimePart.text?.toLowerCase()) -> { ParseNode(ParseType.ATOM, maybeDateTimePart.copy(type = TokenType.DATETIME_PART), listOf(), this.tail) } @@ -2499,8 +2539,10 @@ class SqlParser( * Syntax is EXTRACT( FROM ). 
*/ private fun List.parseExtract(name: Token): ParseNode { - if (head?.type != TokenType.LEFT_PAREN) err("Expected ${TokenType.LEFT_PAREN}", - ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) + if (head?.type != TokenType.LEFT_PAREN) err( + "Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL + ) val dateTimePart = this.tail.parseDateTimePart().deriveExpectedKeyword("from") val rem = dateTimePart.remaining val dateTimeType = rem.parseExpression().deriveExpected(TokenType.RIGHT_PAREN) @@ -2514,8 +2556,10 @@ class SqlParser( private fun List.parseDate(): ParseNode { val dateStringToken = head if (dateStringToken?.value == null || dateStringToken.type != TokenType.LITERAL || !dateStringToken.value.isText) { - err("Expected date string followed by the keyword DATE, found ${head?.value?.type}", - ErrorCode.PARSE_UNEXPECTED_TOKEN) + err( + "Expected date string followed by the keyword DATE, found ${head?.value?.type}", + ErrorCode.PARSE_UNEXPECTED_TOKEN + ) } val dateString = dateStringToken.value.stringValue() @@ -2548,7 +2592,8 @@ class SqlParser( var rem = tail // Expected precision token to be unsigned integer between 0 and 9 inclusive if (rem.head == null || rem.head!!.type != TokenType.LITERAL || !rem.head!!.value!!.isUnsignedInteger || - rem.head!!.value!!.longValue() < 0 || rem.head!!.value!!.longValue() > MAX_PRECISION_FOR_TIME) { + rem.head!!.value!!.longValue() < 0 || rem.head!!.value!!.longValue() > MAX_PRECISION_FOR_TIME + ) { rem.head.err("Expected integer value between 0 and 9 for precision", ErrorCode.PARSE_INVALID_PRECISION_FOR_TIME) } val precision = rem.head @@ -2557,8 +2602,7 @@ class SqlParser( rem.head.errExpectedTokenType(TokenType.RIGHT_PAREN) } ParseNode(ParseType.PRECISION, precision, listOf(), rem.tail) - } - else { + } else { ParseNode(ParseType.PRECISION, null, listOf(), this) } @@ -2569,15 +2613,14 @@ class SqlParser( // If the keyword is specified for time zone, it must be a series of keywords - "with time zone" if (head?.type == TokenType.KEYWORD) { val rem = - tailExpectedKeyword("with"). - tailExpectedKeyword("time"). - tailExpectedKeyword("zone") + tailExpectedKeyword("with") + .tailExpectedKeyword("time") + .tailExpectedKeyword("zone") return Pair(rem, true) } return Pair(this, false) } - /** * Parses a time string and verifies that the time string is a string and is specified in the valid ISO 8601 format. * Allows for optional precision and time zone to be specified with the time. @@ -2606,8 +2649,7 @@ class SqlParser( fun tryTimeParsing(time: String?, formatter: DateTimeFormatter, parse: (String?, DateTimeFormatter) -> Temporal) { try { parse(time, formatter) - } - catch (e: DateTimeParseException) { + } catch (e: DateTimeParseException) { rem.head.err(e.localizedMessage, ErrorCode.PARSE_INVALID_TIME_STRING) } } @@ -2624,7 +2666,7 @@ class SqlParser( if (timeStringToken?.value == null || timeStringToken.type != TokenType.LITERAL || !timeStringToken.value.isText) { rem.head.err( "Expected time string followed by the keyword TIME OR TIME WITH TIME ZONE, found ${rem.head?.value?.type}", - ErrorCode.PARSE_UNEXPECTED_TOKEN + ErrorCode.PARSE_UNEXPECTED_TOKEN ) } @@ -2632,15 +2674,16 @@ class SqlParser( // - If the time zone is true and the local offset is missing, consider local offset from the system settings. val timeString = timeStringToken.value.stringValue()?.replace(" ", "") if (!genericTimeRegex.matches(timeString!!)) { - rem.head.err("Invalid format for time string. 
Expected format is \"TIME [(p)] [WITH TIME ZONE] HH:MM:SS[.ddddd...][+|-HH:MM]\"", - ErrorCode. PARSE_INVALID_TIME_STRING) + rem.head.err( + "Invalid format for time string. Expected format is \"TIME [(p)] [WITH TIME ZONE] HH:MM:SS[.ddddd...][+|-HH:MM]\"", + ErrorCode.PARSE_INVALID_TIME_STRING + ) } // For "TIME WITH TIME ZONE", if the time zone is not explicitly specified, we still consider it as valid. // We will add the default time zone to it later in the evaluation phase. if (!withTimeZone || timeWithoutTimeZoneRegex.matches(timeString)) { tryTimeParsing(timeString, ISO_TIME, LocalTime::parse) - } - else { + } else { tryTimeParsing(timeString, ISO_TIME, OffsetTime::parse) } @@ -2648,14 +2691,15 @@ class SqlParser( // For e.g., TIME '23:12:12.12300' should have precision of 5. // The source span here is just the filler value and does not reflect the actual source location of the precision // as it does not exists in case the precision is unspecified. - val precisionOfValue = precision.token ?: - Token(TokenType.LITERAL, ion.newInt(getPrecisionFromTimeString(timeString)), timeStringToken.span) + val precisionOfValue = precision.token + ?: Token(TokenType.LITERAL, ion.newInt(getPrecisionFromTimeString(timeString)), timeStringToken.span) return ParseNode( if (withTimeZone) ParseType.TIME_WITH_TIME_ZONE else ParseType.TIME, rem.head!!.copy(value = ion.newString(timeString)), listOf(precision.copy(token = precisionOfValue)), - rem.tail) + rem.tail + ) } /** @@ -2665,8 +2709,10 @@ class SqlParser( * is the value of [name]. */ private fun List.parseDateAddOrDateDiff(name: Token): ParseNode { - if (head?.type != TokenType.LEFT_PAREN) err("Expected ${TokenType.LEFT_PAREN}", - ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) + if (head?.type != TokenType.LEFT_PAREN) err( + "Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL + ) val dateTimePart = this.tail.parseDateTimePart().deriveExpected(TokenType.COMMA) val timestamp1 = dateTimePart.remaining.parseExpression().deriveExpected(TokenType.COMMA) @@ -2682,15 +2728,19 @@ class SqlParser( rem = child.remaining if (rem.head?.type != TokenType.AS) { - rem.head.err("Expected ${TokenType.AS} following ${ParseType.LET} expr", - ErrorCode.PARSE_EXPECTED_AS_FOR_LET) + rem.head.err( + "Expected ${TokenType.AS} following ${ParseType.LET} expr", + ErrorCode.PARSE_EXPECTED_AS_FOR_LET + ) } rem = rem.tail if (rem.head?.type?.isIdentifier() != true) { - rem.head.err("Expected identifier for ${TokenType.AS}-alias", - ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS) + rem.head.err( + "Expected identifier for ${TokenType.AS}-alias", + ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS + ) } var name = rem.head @@ -2747,8 +2797,10 @@ class SqlParser( parseCommaList { var rem = this if (rem.head?.type != TokenType.LEFT_PAREN) { - err("Expected ${TokenType.LEFT_PAREN} for row value constructor", - ErrorCode.PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR) + err( + "Expected ${TokenType.LEFT_PAREN} for row value constructor", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR + ) } rem = rem.tail rem.parseArgList( @@ -2790,11 +2842,15 @@ class SqlParser( when (rem.head?.type) { TokenType.ASC, TokenType.DESC -> { - sortSpecKey = listOf(child, ParseNode( - type = ParseType.ORDERING_SPEC, - token = rem.head, - children = listOf(), - remaining = rem.tail)) + sortSpecKey = listOf( + child, + ParseNode( + type = ParseType.ORDERING_SPEC, + token = rem.head, + children = listOf(), + remaining = rem.tail + ) + ) rem = rem.tail } } @@ 
-2846,8 +2902,8 @@ class SqlParser( while (delim?.type?.isJoin == true) { val isCrossJoin = delim.token?.keywordText?.contains("cross") ?: false val hasOnClause = delim.token?.type == TokenType.KEYWORD && !isCrossJoin - var children : List - var joinToken : Token? = delim.token + var children: List + var joinToken: Token? = delim.token rem = rem.tail @@ -2869,14 +2925,10 @@ class SqlParser( rem = onClause.remaining if (!isSubqueryOrLiteral) { children = listOf(parenClause, left, onClause) - } - - else { + } else { children = listOf(left, parenClause, onClause) } - } - - else { + } else { // Rest is just the right side of the clause val rightRef = rem.parseFromSource(precedence, parseRemaining = false) rem = rightRef.remaining @@ -2892,9 +2944,7 @@ class SqlParser( children = listOf(left, rightRef, onClause) } - } - - else { + } else { // For implicit joins val rightRef = rem.parseFromSource(precedence, parseRemaining = false) rem = rightRef.remaining @@ -2914,7 +2964,6 @@ class SqlParser( return child } - private fun List.parseFromSourceList(precedence: Int = -1): ParseNode { val child = this.parseFromSource(precedence) return ParseNode(ParseType.FROM_CLAUSE, null, listOf(child), child.remaining) @@ -2991,15 +3040,17 @@ class SqlParser( rem = rem.tail val name = rem.head if (rem.head?.type?.isIdentifier() != true) { - rem.head.err("Expected identifier for $keywordTokenType-alias", - ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS) + rem.head.err( + "Expected identifier for $keywordTokenType-alias", + ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS + ) } rem = rem.tail ParseNode(parseNodeType, name, listOf(child), rem) } keywordIsOptional && rem.head?.type?.isIdentifier() ?: false -> { ParseNode(parseNodeType, rem.head, listOf(child), rem.tail) - } else -> { + } else -> { child } } @@ -3028,8 +3079,10 @@ class SqlParser( * of the delimiter that was encountered prior to the item to be parsed which could be `null` * for the first item in the list. */ - private inline fun List.parseDelimitedList(parseDelim: List.() -> ParseNode?, - parseItem: List.(delim: ParseNode?) -> ParseNode): ParseNode { + private inline fun List.parseDelimitedList( + parseDelim: List.() -> ParseNode?, + parseItem: List.(delim: ParseNode?) -> ParseNode + ): ParseNode { val items = ArrayList() var delim: ParseNode? = null var rem = this @@ -3044,15 +3097,16 @@ class SqlParser( break } rem = delim.remaining - } return ParseNode(ParseType.ARG_LIST, null, items, rem) } private fun ParseNode.throwTopLevelParserError(): Nothing = token?.err("Keyword ${token.text} only expected at the top level in the query", ErrorCode.PARSE_UNEXPECTED_TERM) - ?: throw ParserException("Keyword ${token?.text} only expected at the top level in the query", - ErrorCode.PARSE_UNEXPECTED_TERM, PropertyValueMap()) + ?: throw ParserException( + "Keyword ${token?.text} only expected at the top level in the query", + ErrorCode.PARSE_UNEXPECTED_TERM, PropertyValueMap() + ) /** * Validates tree to make sure that the top level tokens are not found below the top level. @@ -3111,9 +3165,11 @@ class SqlParser( val rem = node.remaining if (!rem.onlyEndOfStatement()) { when (rem.head?.type) { - TokenType.SEMICOLON -> rem.tail.err("Unexpected token after semicolon. (Only one query is allowed.)", - ErrorCode.PARSE_UNEXPECTED_TOKEN) - else -> rem.err("Unexpected token after expression", ErrorCode.PARSE_UNEXPECTED_TOKEN) + TokenType.SEMICOLON -> rem.tail.err( + "Unexpected token after semicolon. 
(Only one query is allowed.)", + ErrorCode.PARSE_UNEXPECTED_TOKEN + ) + else -> rem.err("Unexpected token after expression", ErrorCode.PARSE_UNEXPECTED_TOKEN) } } diff --git a/lang/src/org/partiql/lang/syntax/Token.kt b/lang/src/org/partiql/lang/syntax/Token.kt index 3f88e80bb0..84317e9c52 100644 --- a/lang/src/org/partiql/lang/syntax/Token.kt +++ b/lang/src/org/partiql/lang/syntax/Token.kt @@ -20,9 +20,11 @@ import org.partiql.lang.util.stringValue /** * Simple [IonValue] based token for lexing PartiQL. */ -data class Token(val type: TokenType, - val value: IonValue? = null, - val span: SourceSpan) { +data class Token( + val type: TokenType, + val value: IonValue? = null, + val span: SourceSpan +) { val text: String? get() = value?.stringValue() @@ -48,7 +50,7 @@ data class Token(val type: TokenType, } val isUnaryOperator: Boolean - get() = when (type){ + get() = when (type) { TokenType.OPERATOR, TokenType.KEYWORD -> text in UNARY_OPERATORS else -> false } diff --git a/lang/src/org/partiql/lang/syntax/TokenType.kt b/lang/src/org/partiql/lang/syntax/TokenType.kt index e777fb18b9..b0620d6507 100644 --- a/lang/src/org/partiql/lang/syntax/TokenType.kt +++ b/lang/src/org/partiql/lang/syntax/TokenType.kt @@ -53,4 +53,3 @@ enum class TokenType { ; fun isIdentifier() = this == TokenType.IDENTIFIER || this == TokenType.QUOTED_IDENTIFIER } - diff --git a/lang/src/org/partiql/lang/types/CustomType.kt b/lang/src/org/partiql/lang/types/CustomType.kt index 05d034c1fe..d0ccd087ed 100644 --- a/lang/src/org/partiql/lang/types/CustomType.kt +++ b/lang/src/org/partiql/lang/types/CustomType.kt @@ -14,4 +14,5 @@ import org.partiql.lang.ast.SqlDataType data class CustomType( val name: String, val typedOpParameter: TypedOpParameter, - val aliases: List = listOf()) + val aliases: List = listOf() +) diff --git a/lang/src/org/partiql/lang/types/FunctionSignature.kt b/lang/src/org/partiql/lang/types/FunctionSignature.kt index 07b10257bc..3474c808c1 100644 --- a/lang/src/org/partiql/lang/types/FunctionSignature.kt +++ b/lang/src/org/partiql/lang/types/FunctionSignature.kt @@ -50,10 +50,10 @@ class FunctionSignature private constructor ( val arity: IntRange = let { val r = requiredParameters.size..requiredParameters.size - val o = if(optionalParameter != null) 0..1 else 0..0 + val o = if (optionalParameter != null) 0..1 else 0..0 val v = variadicParameter?.arityRange ?: 0..0 - (r.first + o.first + v.first)..when(v.last) { + (r.first + o.first + v.first)..when (v.last) { Int.MAX_VALUE -> Int.MAX_VALUE else -> (r.last + o.last + v.last) } diff --git a/lang/src/org/partiql/lang/types/PartiqlAstTypeExtensions.kt b/lang/src/org/partiql/lang/types/PartiqlAstTypeExtensions.kt index 5de3777e3c..d9ef4f9096 100644 --- a/lang/src/org/partiql/lang/types/PartiqlAstTypeExtensions.kt +++ b/lang/src/org/partiql/lang/types/PartiqlAstTypeExtensions.kt @@ -18,19 +18,26 @@ fun PartiqlAst.Type.toTypedOpParameter(customTypedOpParameters: Map TypedOpParameter(StaticType.DECIMAL) this.precision != null && this.scale == null -> TypedOpParameter(DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision.value.toInt()))) else -> TypedOpParameter( - DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision!!.value.toInt(), this.scale!!.value.toInt()))) + DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision!!.value.toInt(), this.scale!!.value.toInt())) + ) } is PartiqlAst.Type.NumericType -> when { this.precision == null && this.scale == null -> TypedOpParameter(StaticType.DECIMAL) 
this.precision != null && this.scale == null -> TypedOpParameter(DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision.value.toInt()))) else -> TypedOpParameter( - DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision!!.value.toInt(), this.scale!!.value.toInt()))) + DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(this.precision!!.value.toInt(), this.scale!!.value.toInt())) + ) } is PartiqlAst.Type.TimestampType -> TypedOpParameter(StaticType.TIMESTAMP) is PartiqlAst.Type.CharacterType -> when { this.length == null -> TypedOpParameter(StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(1)))) - else -> TypedOpParameter(StringType(StringType.StringLengthConstraint.Constrained( - NumberConstraint.Equals(this.length.value.toInt())))) + else -> TypedOpParameter( + StringType( + StringType.StringLengthConstraint.Constrained( + NumberConstraint.Equals(this.length.value.toInt()) + ) + ) + ) } is PartiqlAst.Type.CharacterVaryingType -> when (this.length) { null -> TypedOpParameter(StringType(StringType.StringLengthConstraint.Unconstrained)) @@ -48,7 +55,8 @@ fun PartiqlAst.Type.toTypedOpParameter(customTypedOpParameters: Map TypedOpParameter(StaticType.ANY) is PartiqlAst.Type.CustomType -> customTypedOpParameters.mapKeys { - (k, _) -> k.toLowerCase() + (k, _) -> + k.toLowerCase() }[this.name.text.toLowerCase()] ?: error("Could not find parameter for $this") is PartiqlAst.Type.DateType -> TypedOpParameter(StaticType.DATE) is PartiqlAst.Type.TimeType -> TypedOpParameter( diff --git a/lang/src/org/partiql/lang/types/StaticType.kt b/lang/src/org/partiql/lang/types/StaticType.kt index 7b38fcfd6f..4fabc9eea5 100644 --- a/lang/src/org/partiql/lang/types/StaticType.kt +++ b/lang/src/org/partiql/lang/types/StaticType.kt @@ -68,7 +68,7 @@ sealed class StaticType { @JvmStatic fun fromExprValueType(exprValueType: ExprValueType): StaticType = - when(exprValueType) { + when (exprValueType) { ExprValueType.MISSING -> MISSING ExprValueType.NULL -> NULL ExprValueType.BOOL -> BOOL @@ -90,7 +90,7 @@ sealed class StaticType { @JvmStatic fun fromExprValue(exprValue: ExprValue): StaticType = - when(exprValue.type) { + when (exprValue.type) { ExprValueType.TIME -> { val timeValue = exprValue.timeValue() TimeType(precision = timeValue.precision, withTimeZone = timeValue.zoneOffset != null) @@ -242,7 +242,7 @@ sealed class StaticType { */ // TODO: Remove `NULL` from here. This affects inference as operations (especially NAry) can produce // `NULL` or `MISSING` depending on a null propagation or an incorrect argument. -data class AnyType(override val metas: Map = mapOf()): StaticType() { +data class AnyType(override val metas: Map = mapOf()) : StaticType() { // AnyType encompasses all PartiQL types (including Null type) override fun isInstance(value: ExprValue): Boolean = true override fun isComparableTo(other: StaticType): Boolean = true @@ -267,7 +267,7 @@ data class AnyType(override val metas: Map = mapOf()): StaticType() /** * Represents a [StaticType] that is type of a single [ExprValueType]. 
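// A minimal sketch of the single-runtime-type contract described above, assuming the types
// are imported from org.partiql.lang.types (the package this file appears to belong to);
// this is illustrative only and not part of the patch:
import org.partiql.lang.types.IntType
import org.partiql.lang.types.SingleType

fun singleTypeSketch() {
    // A SingleType is backed by exactly one ExprValueType, so its type domain is the
    // singleton set of that runtime type.
    val t: SingleType = IntType()
    println(t.runtimeType) // INT
    println(t.typeDomain)  // [INT]
}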
*/ -sealed class SingleType: StaticType() { +sealed class SingleType : StaticType() { abstract val runtimeType: ExprValueType override val typeDomain: Set get() = setOf(runtimeType) @@ -314,9 +314,8 @@ class UnsupportedTypeCheckException(message: String) : RuntimeException(message) sealed class CollectionType : SingleType() { abstract val elementType: StaticType - override fun isInstance(value: ExprValue): Boolean { - if(!super.isInstance(value)) return false + if (!super.isInstance(value)) return false return when (this.elementType) { StaticType.ANY -> true // no need to check every element if the elementType is ANY. @@ -362,10 +361,9 @@ object MissingType : SingleType() { get() = listOf(this) override fun toString(): String = "missing" - } -data class BoolType(override val metas: Map = mapOf()): SingleType() { +data class BoolType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.BOOL @@ -378,7 +376,7 @@ data class BoolType(override val metas: Map = mapOf()): SingleType( data class IntType( val rangeConstraint: IntRangeConstraint = IntRangeConstraint.UNCONSTRAINED, override val metas: Map = mapOf() -): SingleType() { +) : SingleType() { enum class IntRangeConstraint(val numBytes: Int, val validRange: LongRange) { /** SQL's SMALLINT (2 Bytes) */ @@ -410,7 +408,7 @@ data class IntType( } override fun isInstance(value: ExprValue): Boolean { - if(value.type != ExprValueType.INT) { + if (value.type != ExprValueType.INT) { return false } @@ -418,10 +416,9 @@ data class IntType( return rangeConstraint.validRange.contains(longValue) } - } -data class FloatType(override val metas: Map = mapOf()): SingleType() { +data class FloatType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.FLOAT @@ -444,7 +441,7 @@ data class DecimalType( override fun matches(d: BigDecimal): Boolean = true } - data class Constrained(val precision: Int, val scale: Int = 0): PrecisionScaleConstraint() { + data class Constrained(val precision: Int, val scale: Int = 0) : PrecisionScaleConstraint() { override fun matches(d: BigDecimal): Boolean { val dv = d.stripTrailingZeros() @@ -453,7 +450,6 @@ data class DecimalType( return integerDigits <= expectedIntegerDigits && dv.scale() <= scale } } - } override val runtimeType: ExprValueType @@ -465,13 +461,13 @@ data class DecimalType( override fun toString(): String = "decimal" override fun isInstance(value: ExprValue): Boolean = - when(value.type) { + when (value.type) { ExprValueType.DECIMAL -> precisionScaleConstraint.matches(value.scalar.numberValue() as BigDecimal) else -> false } } -data class DateType(override val metas: Map = mapOf()): SingleType() { +data class DateType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.DATE @@ -485,7 +481,7 @@ data class TimeType( val precision: Int? 
= null, val withTimeZone: Boolean = false, override val metas: Map = mapOf() -): SingleType() { +) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.TIME @@ -498,7 +494,7 @@ data class TimeType( } } -data class TimestampType(override val metas: Map = mapOf()): SingleType() { +data class TimestampType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.TIMESTAMP @@ -508,7 +504,7 @@ data class TimestampType(override val metas: Map = mapOf()): Single override fun toString(): String = "timestamp" } -data class SymbolType(override val metas: Map = mapOf()): SingleType() { +data class SymbolType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.SYMBOL @@ -523,7 +519,7 @@ data class StringType( override val metas: Map = mapOf() ) : SingleType() { - sealed class StringLengthConstraint{ + sealed class StringLengthConstraint { /** Returns true if the code point count of [value] falls within the constraints. */ abstract fun matches(value: ExprValue): Boolean @@ -531,7 +527,7 @@ data class StringType( override fun matches(value: ExprValue): Boolean = true } - data class Constrained(val length: NumberConstraint): StringLengthConstraint() { + data class Constrained(val length: NumberConstraint) : StringLengthConstraint() { override fun matches(value: ExprValue): Boolean { val str = value.scalar.stringValue() ?: error("value.scalar.stringValue() unexpectedly returned null") @@ -549,7 +545,7 @@ data class StringType( override fun toString(): String = "string" override fun isInstance(value: ExprValue): Boolean = - when(value.type) { + when (value.type) { ExprValueType.STRING -> lengthConstraint.matches(value) else -> false } @@ -557,7 +553,7 @@ data class StringType( internal constructor(numberConstraint: NumberConstraint) : this(StringLengthConstraint.Constrained(numberConstraint)) } -data class BlobType(override val metas: Map = mapOf()): SingleType() { +data class BlobType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.BLOB @@ -567,7 +563,7 @@ data class BlobType(override val metas: Map = mapOf()): SingleType( override fun toString(): String = "blob" } -data class ClobType(override val metas: Map = mapOf()): SingleType() { +data class ClobType(override val metas: Map = mapOf()) : SingleType() { override val runtimeType: ExprValueType get() = ExprValueType.CLOB @@ -583,7 +579,7 @@ data class ClobType(override val metas: Map = mapOf()): SingleType( data class ListType( override val elementType: StaticType = ANY, override val metas: Map = mapOf() -): CollectionType() { +) : CollectionType() { override val runtimeType: ExprValueType get() = ExprValueType.LIST @@ -592,7 +588,7 @@ data class ListType( override val allTypes: List get() = listOf(this) - override fun toString(): String = "list(${elementType})" + override fun toString(): String = "list($elementType)" } /** @@ -601,7 +597,7 @@ data class ListType( data class SexpType( override val elementType: StaticType = ANY, override val metas: Map = mapOf() -): CollectionType() { +) : CollectionType() { override val runtimeType: ExprValueType get() = ExprValueType.SEXP override fun flatten(): StaticType = this @@ -609,7 +605,7 @@ data class SexpType( override val allTypes: List get() = listOf(this) - override fun toString(): String = "sexp(${elementType})" + override fun toString(): String = "sexp($elementType)" } /** @@ -618,7 +614,7 @@ data 
class SexpType( data class BagType( override val elementType: StaticType = ANY, override val metas: Map = mapOf() -): CollectionType() { +) : CollectionType() { override val runtimeType: ExprValueType get() = ExprValueType.BAG @@ -627,7 +623,7 @@ data class BagType( override val allTypes: List get() = listOf(this) - override fun toString(): String = "bag(${elementType})" + override fun toString(): String = "bag($elementType)" } data class StructType( @@ -713,7 +709,6 @@ data class StructType( } } } - } /** @@ -727,13 +722,15 @@ data class AnyOfType(val types: Set, override val metas: Map listOf(it) - is AnyType -> listOf(it) - is AnyOfType -> it.types - } - }.toSet()).let { + override fun flatten(): StaticType = this.copy( + types = this.types.flatMap { + when (it) { + is SingleType -> listOf(it) + is AnyType -> listOf(it) + is AnyOfType -> it.types + } + }.toSet() + ).let { when { it.types.size == 1 -> it.types.first() it.types.filterIsInstance().any() -> it.flatten() @@ -791,11 +788,11 @@ sealed class NumberConstraint { abstract val value: Int - data class Equals(override val value: Int): NumberConstraint() { + data class Equals(override val value: Int) : NumberConstraint() { override fun matches(num: Int): Boolean = value == num } - data class UpTo(override val value: Int): NumberConstraint() { + data class UpTo(override val value: Int) : NumberConstraint() { override fun matches(num: Int): Boolean = value >= num } } diff --git a/lang/src/org/partiql/lang/types/TypedOpParameter.kt b/lang/src/org/partiql/lang/types/TypedOpParameter.kt index bf5c636236..6d0625841c 100644 --- a/lang/src/org/partiql/lang/types/TypedOpParameter.kt +++ b/lang/src/org/partiql/lang/types/TypedOpParameter.kt @@ -12,4 +12,4 @@ import org.partiql.lang.eval.ExprValue data class TypedOpParameter( val staticType: StaticType, val validationThunk: ((ExprValue) -> Boolean)? = null -) \ No newline at end of file +) diff --git a/lang/src/org/partiql/lang/util/BindingHelpers.kt b/lang/src/org/partiql/lang/util/BindingHelpers.kt index 1809088941..0ef8d83d2c 100644 --- a/lang/src/org/partiql/lang/util/BindingHelpers.kt +++ b/lang/src/org/partiql/lang/util/BindingHelpers.kt @@ -20,19 +20,23 @@ import org.partiql.lang.eval.BindingCase import org.partiql.lang.eval.err internal fun errAmbiguousBinding(bindingName: String, matchingNames: List): Nothing { - err("Multiple matches were found for the specified identifier", + err( + "Multiple matches were found for the specified identifier", ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, - propertyValueMapOf(Property.BINDING_NAME to bindingName, - Property.BINDING_NAME_MATCHES to matchingNames.joinToString(", ")), - internal = false) + propertyValueMapOf( + Property.BINDING_NAME to bindingName, + Property.BINDING_NAME_MATCHES to matchingNames.joinToString(", ") + ), + internal = false + ) } /** * Compares this string to [other] using the rules specified by [case]. 
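// A minimal sketch of the case-sensitivity rules described above, assuming the extension is
// imported from org.partiql.lang.util (the package of BindingHelpers.kt); illustrative only:
import org.partiql.lang.eval.BindingCase
import org.partiql.lang.util.isBindingNameEquivalent

fun bindingNameSketch() {
    // SENSITIVE requires an exact match.
    println("groupKey".isBindingNameEquivalent("GROUPKEY", BindingCase.SENSITIVE))   // false
    // INSENSITIVE applies the library's case-insensitive equivalence, so this is expected to be true.
    println("groupKey".isBindingNameEquivalent("GROUPKEY", BindingCase.INSENSITIVE))
}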
*/ fun String.isBindingNameEquivalent(other: String, case: BindingCase): Boolean = - when(case) { - BindingCase.SENSITIVE -> this.equals(other) + when (case) { + BindingCase.SENSITIVE -> this.equals(other) BindingCase.INSENSITIVE -> this.caseInsensitiveEquivalent(other) } @@ -56,8 +60,8 @@ abstract class BindingHelper private constructor() { */ @JvmStatic fun bindingNameEquals(id1: String, id2: String, case: BindingCase): Boolean = - when(case) { - BindingCase.SENSITIVE -> id1.equals(id2) + when (case) { + BindingCase.SENSITIVE -> id1.equals(id2) BindingCase.INSENSITIVE -> id1.caseInsensitiveEquivalent(id2) } @@ -67,4 +71,4 @@ abstract class BindingHelper private constructor() { @JvmStatic fun throwAmbiguousBindingEvaluationException(bindingName: String, matchingNames: List): Nothing = errAmbiguousBinding(bindingName, matchingNames) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/util/CollectionExtensions.kt b/lang/src/org/partiql/lang/util/CollectionExtensions.kt index 51c5b7aa65..2894bcb3dd 100644 --- a/lang/src/org/partiql/lang/util/CollectionExtensions.kt +++ b/lang/src/org/partiql/lang/util/CollectionExtensions.kt @@ -29,7 +29,6 @@ inline val List.tail: List else -> this.subList(1, this.size) } - /** Returns true if any ExprValue in the Iterable is an unknown value, i.e. either MISSING or NULL. */ fun Iterable.isAnyUnknown() = any { it.isUnknown() } @@ -39,7 +38,6 @@ fun Iterable.isAnyNull() = any { it.type == ExprValueType.NULL } /** Returns true if any ExprValue in the Iterable is missing. */ fun Iterable.isAnyMissing() = any { it.type == ExprValueType.MISSING } - /** * This should function the same as Kotlin's [Sequence.take(n: Int)] function but takes * a long value instead. @@ -94,7 +92,6 @@ internal fun Sequence.drop(count: Long): Sequence { } } - /** * Given a predicate function, return `true` if all members of the list satisfy the predicate, return false otherwise. * In the case that an empty list is given, the result is `true`. @@ -108,7 +105,7 @@ internal fun Sequence.drop(count: Long): Sequence { * @param T type of each element in the list * */ -inline fun List.forAll(predicate: (T) -> Boolean) : Boolean = +inline fun List.forAll(predicate: (T) -> Boolean): Boolean = this.find { x -> !predicate(x) } == null /** @@ -134,8 +131,10 @@ fun List>.product(): Iterable> = foldLeftProduct(Unit) { * @param S The source type of the list to be map and folded upon. * @param C The context to map and fold over [S]. */ -inline fun List.foldLeftProduct(initialContext: C, - crossinline map: (C, S) -> Iterator>) : Iterable> = +inline fun List.foldLeftProduct( + initialContext: C, + crossinline map: (C, S) -> Iterator> +): Iterable> = object : Iterable> { override fun iterator(): Iterator> { val sources = this@foldLeftProduct diff --git a/lang/src/org/partiql/lang/util/ExprValueFormatter.kt b/lang/src/org/partiql/lang/util/ExprValueFormatter.kt index 32e42d15bb..917065bfbc 100644 --- a/lang/src/org/partiql/lang/util/ExprValueFormatter.kt +++ b/lang/src/org/partiql/lang/util/ExprValueFormatter.kt @@ -48,7 +48,7 @@ class ConfigurableExprValueFormatter(private val config: Configuration) : ExprVa // fallback to an Ion literal for all types that don't have a native PartiQL representation ExprValueType.FLOAT, ExprValueType.TIMESTAMP, ExprValueType.SYMBOL, - ExprValueType.CLOB,ExprValueType. 
BLOB, ExprValueType.SEXP -> prettyPrintIonLiteral(value) + ExprValueType.CLOB, ExprValueType.BLOB, ExprValueType.SEXP -> prettyPrintIonLiteral(value) ExprValueType.LIST -> prettyPrintContainer(value, "[", "]") ExprValueType.BAG -> prettyPrintContainer(value, "<<", ">>") @@ -60,10 +60,12 @@ class ConfigurableExprValueFormatter(private val config: Configuration) : ExprVa } } - private fun prettyPrintContainer(value: ExprValue, - openingMarker: String, - closingMarker: String, - prettyPrintElement: (ExprValue) -> Unit = { v -> recursivePrettyPrint(v) }) { + private fun prettyPrintContainer( + value: ExprValue, + openingMarker: String, + closingMarker: String, + prettyPrintElement: (ExprValue) -> Unit = { v -> recursivePrettyPrint(v) } + ) { val iterator = value.iterator() @@ -78,10 +80,9 @@ class ConfigurableExprValueFormatter(private val config: Configuration) : ExprVa iterator.forEach { v -> out.append(",") - if(config.containerValueSeparator.isEmpty()) { + if (config.containerValueSeparator.isEmpty()) { out.append(" ") - } - else { + } else { out.append(config.containerValueSeparator) } @@ -94,14 +95,12 @@ class ConfigurableExprValueFormatter(private val config: Configuration) : ExprVa out.append(config.containerValueSeparator) writeIndentation() out.append(closingMarker) - } - else { + } else { // empty container out.append(openingMarker).append(closingMarker) } } - private fun prettyPrintIonLiteral(value: ExprValue) { val ionValue = value.ionValue out.append("`") diff --git a/lang/src/org/partiql/lang/util/FacetExtensions.kt b/lang/src/org/partiql/lang/util/FacetExtensions.kt index 92607f0661..ef7a3e93ac 100644 --- a/lang/src/org/partiql/lang/util/FacetExtensions.kt +++ b/lang/src/org/partiql/lang/util/FacetExtensions.kt @@ -14,7 +14,6 @@ package org.partiql.lang.util - /** * Simple dynamic downcast for a type. 
*/ diff --git a/lang/src/org/partiql/lang/util/IonValueExtensions.kt b/lang/src/org/partiql/lang/util/IonValueExtensions.kt index ed6e0a7076..95cf365531 100644 --- a/lang/src/org/partiql/lang/util/IonValueExtensions.kt +++ b/lang/src/org/partiql/lang/util/IonValueExtensions.kt @@ -76,126 +76,125 @@ fun IonValue.asSequence(): Sequence = when (this) { fun IonInt.javaValue(): Number = when (integerSize) { IntegerSize.BIG_INTEGER -> bigIntegerValue() - else -> longValue() + else -> longValue() } fun IonValue.numberValue(): Number = when { isNullValue -> err("Expected non-null number: $this") - else -> when (this) { - is IonInt -> javaValue() + else -> when (this) { + is IonInt -> javaValue() is IonFloat -> doubleValue() is IonDecimal -> decimalValue() - else -> err("Expected number: $this") + else -> err("Expected number: $this") } } fun IonValue.longValue(): Long { val number = numberValue() return when (number) { - is Int -> number.toLong() - is Long -> number + is Int -> number.toLong() + is Long -> number is BigInteger -> number.longValueExact() - else -> err("Number is not a long: $number") + else -> err("Number is not a long: $number") } } fun IonValue.doubleValue(): Double = when { - isNullValue -> err("Expected non-null double: $this") + isNullValue -> err("Expected non-null double: $this") this is IonFloat -> doubleValue() - else -> err("Expected double: $this") + else -> err("Expected double: $this") } fun IonValue.bigDecimalValue(): BigDecimal = when { - isNullValue -> err("Expected non-null decimal: $this") + isNullValue -> err("Expected non-null decimal: $this") this is IonDecimal -> decimalValue() - else -> err("Expected decimal: $this") + else -> err("Expected decimal: $this") } fun IonValue.booleanValue(): Boolean = when (this) { is IonBool -> booleanValue() - else -> err("Expected boolean: $this") + else -> err("Expected boolean: $this") } fun IonValue.timestampValue(): Timestamp = when (this) { is IonTimestamp -> timestampValue() - else -> err("Expected timestamp: $this") + else -> err("Expected timestamp: $this") } fun IonValue.stringValue(): String? = when (this) { is IonText -> stringValue() - else -> err("Expected text: $this") + else -> err("Expected text: $this") } fun IonValue.bytesValue(): ByteArray = when (this) { is IonLob -> bytes - else -> err("Expected LOB: $this") + else -> err("Expected LOB: $this") } fun IonValue.numberValueOrNull(): Number? = when (this) { - is IonInt -> javaValue() - is IonFloat -> doubleValue() + is IonInt -> javaValue() + is IonFloat -> doubleValue() is IonDecimal -> decimalValue() - else -> null + else -> null } - fun IonValue.longValueOrNull(): Long? { val number = numberValue() return when (number) { - is Int -> number.toLong() - is Long -> number + is Int -> number.toLong() + is Long -> number is BigInteger -> number.longValueExact() - else -> null + else -> null } } fun IonValue.doubleValueOrNull(): Double? = when { this is IonFloat -> doubleValue() - else -> null + else -> null } fun IonValue.bigDecimalValueOrNull(): BigDecimal? = when { this is IonDecimal -> bigDecimalValue() - else -> null + else -> null } fun IonValue.booleanValueOrNull(): Boolean? = when (this) { is IonBool -> booleanValue() - else -> null + else -> null } fun IonValue.timestampValueOrNull(): Timestamp? = when (this) { is IonTimestamp -> timestampValue() - else -> null + else -> null } fun IonValue.stringValueOrNull(): String? = when (this) { is IonText -> stringValue() - else -> null + else -> null } fun IonValue.bytesValueOrNull(): ByteArray? 
= when (this) { is IonLob -> bytes - else -> null + else -> null } val IonValue.isNumeric: Boolean get() = when (this) { is IonInt, is IonFloat, is IonDecimal -> true - else -> false + else -> false } val IonValue.isUnsignedInteger: Boolean get() = when (this) { is IonInt -> longValue() >= 0 - else -> false + else -> false } val IonValue.isNonNullText: Boolean get() = when (this) { is IonText -> !isNullValue - else -> false + else -> false } val IonValue.ordinal: Int @@ -204,19 +203,19 @@ val IonValue.ordinal: Int val IonValue.isText: Boolean get() = when (this) { is IonText -> true - else -> false + else -> false } val IonValue.isBag: Boolean get() = when (this) { is IonList -> this.hasTypeAnnotation(BAG_ANNOTATION) - else -> false + else -> false } val IonValue.isMissing: Boolean get() = when (this) { is IonNull -> this.hasTypeAnnotation(MISSING_ANNOTATION) - else -> false + else -> false } /** Creates a new [IonSexp] from a legacy AST [IonSexp] that strips out meta nodes. */ @@ -225,7 +224,7 @@ fun IonSexp.filterMetaNodes(): IonValue { while (target[0].stringValue() == "meta") { val tmpTarget = target[1] - if(tmpTarget !is IonSexp) { + if (tmpTarget !is IonSexp) { return tmpTarget.clone() } target = tmpTarget.asIonSexp() @@ -237,7 +236,7 @@ fun IonSexp.filterMetaNodes(): IonValue { add( when { !isLiteral && child is IonSexp -> child.filterMetaNodes() - else -> child.clone() + else -> child.clone() } ) } @@ -258,7 +257,7 @@ fun IonValue.asIonSymbol() = this as? IonSymbol ?: err("Expected an IonSymbol bu fun IonStruct.field(nameOfField: String) = this.get(nameOfField) ?: err("Expected struct field '$nameOfField' was not present.") val IonSexp.tagText: String get() { - if(this.isEmpty) { + if (this.isEmpty) { err("IonSexp was empty") } @@ -273,4 +272,4 @@ val IonSexp.arity: Int get() = this.size - 1 fun IonValue.isAstLiteral(): Boolean = this is IonSexp && - this[0].stringValue() == "lit" //TODO AST node names should be refactored to statics + this[0].stringValue() == "lit" // TODO AST node names should be refactored to statics diff --git a/lang/src/org/partiql/lang/util/IonWriterContext.kt b/lang/src/org/partiql/lang/util/IonWriterContext.kt index 8ef8cc988b..82b9a327f4 100644 --- a/lang/src/org/partiql/lang/util/IonWriterContext.kt +++ b/lang/src/org/partiql/lang/util/IonWriterContext.kt @@ -26,15 +26,13 @@ import com.amazon.ion.IonWriter class IonWriterContext(val writer: IonWriter) { fun setNextFieldName(fieldName: String) { - if(!writer.isInStruct) { + if (!writer.isInStruct) { throw IllegalStateException("Cannot set field name while not in a struct") } writer.setFieldName(fieldName) } - - fun sexp(block: IonWriterContext.() -> Unit) { writer.stepIn(IonType.SEXP) block(this) @@ -185,8 +183,8 @@ class IonWriterContext(val writer: IonWriter) { } } - fun value(fieldName: String, value:IonValue) { + fun value(fieldName: String, value: IonValue) { setNextFieldName(fieldName) value(value) } -} \ No newline at end of file +} diff --git a/lang/src/org/partiql/lang/util/LongExtensions.kt b/lang/src/org/partiql/lang/util/LongExtensions.kt index 99aba18189..17a15180ae 100644 --- a/lang/src/org/partiql/lang/util/LongExtensions.kt +++ b/lang/src/org/partiql/lang/util/LongExtensions.kt @@ -5,9 +5,9 @@ package org.partiql.lang.util * * This is needed because Kotlin's default [Long.toInt()] returns `-1` instead of throwing an exception. 
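// A minimal usage sketch of the extension documented above, assuming it is imported from
// org.partiql.lang.util (the package of LongExtensions.kt); values are illustrative only:
import org.partiql.lang.util.toIntExact

fun toIntExactSketch() {
    println(42L.toIntExact()) // prints 42: the value fits in an Int
    try {
        Long.MAX_VALUE.toIntExact() // outside Int.MIN_VALUE..Int.MAX_VALUE
    } catch (e: IllegalStateException) {
        println("rejected: ${e.message}") // fails loudly instead of silently truncating to -1
    }
}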
*/ -fun Long.toIntExact():Int = - if(this !in Int.MIN_VALUE..Int.MAX_VALUE) { +fun Long.toIntExact(): Int = + if (this !in Int.MIN_VALUE..Int.MAX_VALUE) { throw IllegalStateException("Long value is not within Int.MIN_VALUE..Int.MAX_VALUE") } else { this.toInt() - } \ No newline at end of file + } diff --git a/lang/src/org/partiql/lang/util/NumberExtensions.kt b/lang/src/org/partiql/lang/util/NumberExtensions.kt index 3c1c8e8587..8ebebfd860 100644 --- a/lang/src/org/partiql/lang/util/NumberExtensions.kt +++ b/lang/src/org/partiql/lang/util/NumberExtensions.kt @@ -31,12 +31,12 @@ private val MATH_CONTEXT = MathContext(38, RoundingMode.HALF_EVEN) // TODO shoul */ internal fun bigDecimalOf(num: Number, mc: MathContext = MATH_CONTEXT): BigDecimal = when (num) { is Decimal -> num - is Int -> BigDecimal(num, mc) - is Long -> BigDecimal(num, mc) - is Double -> BigDecimal(num, mc) - is BigDecimal -> num + is Int -> BigDecimal(num, mc) + is Long -> BigDecimal(num, mc) + is Double -> BigDecimal(num, mc) + is BigDecimal -> num Decimal.NEGATIVE_ZERO -> num as Decimal - else -> throw IllegalArgumentException("Unsupported number type: $num, ${num.javaClass}") + else -> throw IllegalArgumentException("Unsupported number type: $num, ${num.javaClass}") } internal fun bigDecimalOf(text: String, mc: MathContext = MATH_CONTEXT): BigDecimal = BigDecimal(text.trim(), mc) @@ -49,12 +49,12 @@ private val CONVERSION_MAP = mapOf>, Class>( setOf(Double::class.javaObjectType, Double::class.javaObjectType) to Double::class.javaObjectType, setOf(Double::class.javaObjectType, BigDecimal::class.javaObjectType) to BigDecimal::class.javaObjectType, - setOf(BigDecimal::class.javaObjectType, BigDecimal::class.javaObjectType) to BigDecimal::class.javaObjectType) - + setOf(BigDecimal::class.javaObjectType, BigDecimal::class.javaObjectType) to BigDecimal::class.javaObjectType +) private val CONVERTERS = mapOf, (Number) -> Number>( - Long::class.javaObjectType to Number::toLong, - Double::class.javaObjectType to Number::toDouble, + Long::class.javaObjectType to Number::toLong, + Double::class.javaObjectType to Number::toDouble, BigDecimal::class.java to { num -> when (num) { is Long -> bigDecimalOf(num) @@ -67,11 +67,11 @@ private val CONVERTERS = mapOf, (Number) -> Number>( } ) -internal fun Number.isZero() = when(this) { +internal fun Number.isZero() = when (this) { // using compareTo instead of equals for BigDecimal because equality also checks same scale is Long -> this == 0L - is Double -> this == 0.0 || this == -0.0 + is Double -> this == 0.0 || this == -0.0 is BigDecimal -> BigDecimal.ZERO.compareTo(this) == 0 else -> throw IllegalStateException() } @@ -89,14 +89,14 @@ fun Number.coerce(type: Class): Number { * This is only supported on limited types needed by the expression system. 
*/ fun coerceNumbers(first: Number, second: Number): Pair { - fun typeFor(n: Number): Class<*> = if(n is Decimal) { + fun typeFor(n: Number): Class<*> = if (n is Decimal) { BigDecimal::class.javaObjectType } else { n.javaClass } - val type = CONVERSION_MAP[setOf(typeFor(first), typeFor(second))] ?: - throw IllegalArgumentException("No coercion support for ${typeFor(first)} to ${typeFor(second)}") + val type = CONVERSION_MAP[setOf(typeFor(first), typeFor(second))] + ?: throw IllegalArgumentException("No coercion support for ${typeFor(first)} to ${typeFor(second)}") return Pair(first.coerce(type), second.coerce(type)) } @@ -114,11 +114,11 @@ operator fun Number.unaryMinus(): Number { is Long -> -this is BigInteger -> this.negate() is Double -> -this - is BigDecimal -> if(this.isZero()) { - Decimal.negativeZero(this.scale()) - } else { - this.negate() - } + is BigDecimal -> if (this.isZero()) { + Decimal.negativeZero(this.scale()) + } else { + this.negate() + } else -> throw IllegalStateException() } } @@ -133,7 +133,7 @@ private fun Long.checkOverflowPlus(other: Long): Number { val overflows = ((this xor other) >= 0) and ((this xor result) < 0) return when (overflows) { false -> result - else -> errIntOverflow(8) + else -> errIntOverflow(8) } } @@ -144,7 +144,7 @@ private fun Long.checkOverflowMinus(other: Long): Number { val overflows = ((this xor other) < 0) and ((this xor result) < 0) return when (overflows) { false -> result - else -> errIntOverflow(8) + else -> errIntOverflow(8) } } @@ -154,16 +154,17 @@ private fun Long.checkOverflowTimes(other: Long): Number { // Hacker's Delight, Section 2-12 val leadingZeros = this.numberOfLeadingZeros() + - this.inv().numberOfLeadingZeros() + - other.numberOfLeadingZeros() + - other.inv().numberOfLeadingZeros() + this.inv().numberOfLeadingZeros() + + other.numberOfLeadingZeros() + + other.inv().numberOfLeadingZeros() val result = this * other val longSize = java.lang.Long.SIZE if ((leadingZeros >= longSize) && ((this >= 0) or (other != Long.MIN_VALUE)) && - (this == 0L || result / this == other)) { + (this == 0L || result / this == other) + ) { return result } @@ -173,7 +174,7 @@ private fun Long.checkOverflowTimes(other: Long): Number { private fun Long.checkOverflowDivision(other: Long): Number { // division can only underflow Long.MIN_VALUE / -1 // because abs(Long.MIN_VALUE) == abs(Long.MAX_VALUE) + 1 - if(this == Long.MIN_VALUE && other == -1L){ + if (this == Long.MIN_VALUE && other == -1L) { errIntOverflow(8) } @@ -240,17 +241,17 @@ operator fun Number.compareTo(other: Number): Int { } } -val Number.isNaN get() = when(this) { +val Number.isNaN get() = when (this) { is Double -> isNaN() else -> false } -val Number.isNegInf get() = when(this) { +val Number.isNegInf get() = when (this) { is Double -> isInfinite() && this < 0 else -> false } -val Number.isPosInf get() = when(this) { +val Number.isPosInf get() = when (this) { is Double -> isInfinite() && this > 0 else -> false } diff --git a/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt b/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt index 79eb783633..4877537572 100644 --- a/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt +++ b/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt @@ -27,13 +27,13 @@ fun propertyValueMapOf(vararg properties: Pair): PropertyValueMap properties.forEach { if (pvm.hasProperty(it.first)) throw IllegalArgumentException("Duplicate property: ${it.first.propertyName}") when (it.second) { - is Int -> pvm[it.first] = it.second as Int - is Long -> 
pvm[it.first] = it.second as Long - is String -> pvm[it.first] = it.second as String + is Int -> pvm[it.first] = it.second as Int + is Long -> pvm[it.first] = it.second as Long + is String -> pvm[it.first] = it.second as String is TokenType -> pvm[it.first] = it.second as TokenType is IonValue -> pvm[it.first] = it.second as IonValue - is Enum<*> -> pvm[it.first] = it.second.toString() - else -> throw IllegalArgumentException("Cannot convert ${it.second.javaClass.name} to PropertyValue") + is Enum<*> -> pvm[it.first] = it.second.toString() + else -> throw IllegalArgumentException("Cannot convert ${it.second.javaClass.name} to PropertyValue") } } @@ -45,4 +45,3 @@ fun propertyValueMapOf(vararg properties: Pair): PropertyValueMap * Intended to be used in conjunction with [propertyValueMapOf]. */ infix fun Property.to(that: Any): Pair = Pair(this, that) - diff --git a/lang/src/org/partiql/lang/util/ThreadInterruptUtils.kt b/lang/src/org/partiql/lang/util/ThreadInterruptUtils.kt index e25b508989..c42d1dce4b 100644 --- a/lang/src/org/partiql/lang/util/ThreadInterruptUtils.kt +++ b/lang/src/org/partiql/lang/util/ThreadInterruptUtils.kt @@ -2,7 +2,7 @@ package org.partiql.lang.util /** Throws [InterruptedException] if [Thread.interrupted] is set. */ internal fun checkThreadInterrupted() { - if(Thread.interrupted()) { + if (Thread.interrupted()) { throw InterruptedException() } } diff --git a/lang/src/org/partiql/lang/util/TokenListExtensions.kt b/lang/src/org/partiql/lang/util/TokenListExtensions.kt index b9ddd1664a..bcefa192d7 100644 --- a/lang/src/org/partiql/lang/util/TokenListExtensions.kt +++ b/lang/src/org/partiql/lang/util/TokenListExtensions.kt @@ -33,7 +33,7 @@ import org.partiql.lang.syntax.TokenType */ internal fun List.onlyEndOfStatement() = (size == 1 && this[0].type == TokenType.EOF) || - (size == 2 && this[0].type == TokenType.SEMICOLON && this[1].type == TokenType.EOF) + (size == 2 && this[0].type == TokenType.SEMICOLON && this[1].type == TokenType.EOF) /** * Given an error context ([PropertyValueMap]) and a source position ([SourcePosition]) populate the given @@ -72,10 +72,10 @@ internal fun Token?.errExpectedTokenType(expectedType: TokenType): Nothing { } internal fun List.atomFromHead(parseType: SqlParser.ParseType = SqlParser.ParseType.ATOM): SqlParser.ParseNode = - SqlParser.ParseNode(parseType, head, emptyList(), tail) + SqlParser.ParseNode(parseType, head, emptyList(), tail) internal fun List.err(message: String, errorCode: ErrorCode, errorContext: PropertyValueMap = PropertyValueMap()): Nothing = - head.err(message, errorCode, errorContext) + head.err(message, errorCode, errorContext) internal fun List.tailExpectedKeyword(keyword: String): List { when (head?.keywordText) { @@ -91,7 +91,5 @@ internal fun List.tailExpectedKeyword(keyword: String): List { internal fun List.tailExpectedToken(tokenType: TokenType): List = when (head?.type) { tokenType -> tail - else -> head.errExpectedTokenType(tokenType) + else -> head.errExpectedTokenType(tokenType) } - - diff --git a/lang/src/org/partiql/lang/util/WhenAsExpression.kt b/lang/src/org/partiql/lang/util/WhenAsExpression.kt index 93ea4127bb..189ac34308 100644 --- a/lang/src/org/partiql/lang/util/WhenAsExpression.kt +++ b/lang/src/org/partiql/lang/util/WhenAsExpression.kt @@ -12,7 +12,7 @@ * language governing permissions and limitations under the License. */ -package org.partiql.lang.util; +package org.partiql.lang.util /** * In Kotlin, the cases of a `when >...` must be exhaustive only when `when` is used as an expression. 
@@ -35,7 +35,6 @@ inline fun case(block: () -> Unit): WhenAsExpressionHelper { return WhenAsExpressionHelper.Instance } - /** * See [case] for a description of how to use this[] */ @@ -44,4 +43,4 @@ class WhenAsExpressionHelper private constructor() { companion object { val Instance = WhenAsExpressionHelper() } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/CustomTypeTestFixtures.kt b/lang/test/org/partiql/lang/CustomTypeTestFixtures.kt index 2385236f6f..61456df90f 100644 --- a/lang/test/org/partiql/lang/CustomTypeTestFixtures.kt +++ b/lang/test/org/partiql/lang/CustomTypeTestFixtures.kt @@ -36,7 +36,7 @@ val esFloatParameter = TypedOpParameter(StaticType.FLOAT) { /** Emulate the recursive validation function for ES_ANY. */ private fun esValidateAny(value: ExprValue): Boolean = - when (value.type){ + when (value.type) { ExprValueType.FLOAT -> esFloatParameter.validationThunk?.invoke(value) ?: true ExprValueType.MISSING, ExprValueType.NULL, @@ -64,7 +64,6 @@ val esAny = TypedOpParameter( ::esValidateAny ) - // RS_INTEGER private val rsIntegerPrecisionParameter = TypedOpParameter(IntType(IntType.IntRangeConstraint.INT4)) @@ -73,7 +72,8 @@ private val rsBigintPrecisionParameter = TypedOpParameter(IntType(IntType.IntRan // RS_VARCHAR_MAX private val rsStringParameter = TypedOpParameter( - StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(10)))) + StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(10))) +) // RS_REAL private val rsRealParameter = TypedOpParameter(StaticType.FLOAT) { diff --git a/lang/test/org/partiql/lang/Ion.kt b/lang/test/org/partiql/lang/Ion.kt index 26f329b5c1..3b486d4135 100644 --- a/lang/test/org/partiql/lang/Ion.kt +++ b/lang/test/org/partiql/lang/Ion.kt @@ -12,4 +12,3 @@ import com.amazon.ion.system.IonSystemBuilder * (Singletons like this probably should be avoided in production code.) 
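// A minimal sketch of using this shared test fixture, assuming it is referenced from another
// test source in the same module (the value is internal) and that the Ion text below is
// purely illustrative:
import com.amazon.ion.IonValue

fun ionFixtureSketch() {
    // Parse a single Ion value with the shared IonSystem instead of building a new one per test.
    val v: IonValue = ION.singleValue("{ foo: 1 }")
    println(v)
}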
*/ internal val ION: IonSystem = IonSystemBuilder.standard().build() - diff --git a/lang/test/org/partiql/lang/TestBase.kt b/lang/test/org/partiql/lang/TestBase.kt index c910604711..6e80aa9388 100644 --- a/lang/test/org/partiql/lang/TestBase.kt +++ b/lang/test/org/partiql/lang/TestBase.kt @@ -18,11 +18,10 @@ import com.amazon.ion.Decimal import com.amazon.ion.IonSystem import com.amazon.ion.IonValue import com.amazon.ion.Timestamp -import com.amazon.ion.system.IonSystemBuilder -import org.junit.Assert -import org.junit.runner.RunWith import junitparams.JUnitParamsRunner import org.assertj.core.api.SoftAssertions +import org.junit.Assert +import org.junit.runner.RunWith import org.partiql.lang.ast.ExprNode import org.partiql.lang.ast.passes.AstRewriterBase import org.partiql.lang.errors.ErrorCode @@ -52,14 +51,14 @@ import kotlin.reflect.KClass * @param expectedValues expected values for errorContext */ fun SoftAssertions.checkErrorAndErrorContext(errorCode: ErrorCode?, ex: T, expectedValues: Map) { - if(ex.errorCode == null && errorCode != null) { + if (ex.errorCode == null && errorCode != null) { fail("Expected an error code but exception error code was null, message was: ${ex.message}") } else { this.assertThat(ex.errorCode).isEqualTo(errorCode) } val errorContext = ex.errorContext - if(errorCode != null) { + if (errorCode != null) { correctContextKeys(errorCode, errorContext) correctContextValues(errorCode, errorContext, expectedValues) } @@ -76,7 +75,6 @@ private fun SoftAssertions.correctContextKeys(errorCode: ErrorCode, errorContext assertThat(errorContext!!.hasProperty(it)) .withFailMessage("Error Context does not contain $it") .isTrue - } /** @@ -90,17 +88,19 @@ private fun SoftAssertions.correctContextKeys(errorCode: ErrorCode, errorContext private fun SoftAssertions.correctContextValues(errorCode: ErrorCode, errorContext: PropertyValueMap?, expected: Map) { assertThat(errorCode.getProperties().containsAll(expected.keys)) - .withFailMessage("Actual errorCode must contain these properties: " + - "${expected.keys.joinToString(", ")} but contained only: " + - errorCode.getProperties().joinToString(", ")) + .withFailMessage( + "Actual errorCode must contain these properties: " + + "${expected.keys.joinToString(", ")} but contained only: " + + errorCode.getProperties().joinToString(", ") + ) .isTrue val unexpectedProperties = errorCode.getProperties().filter { p -> !expected.containsKey(p) } - if(unexpectedProperties.any()) { + if (unexpectedProperties.any()) { fail("Unexpected properties found in error code: ${unexpectedProperties.joinToString(", ")}") } - if(errorContext == null) return + if (errorContext == null) return expected.forEach { entry -> val actualPropertyValue: PropertyValue? = errorContext[entry.key] assertThat(errorContext.hasProperty(entry.key)) @@ -109,15 +109,15 @@ private fun SoftAssertions.correctContextValues(errorCode: ErrorCode, errorConte val message by lazy { "Expected property ${entry.key} to have value '${entry.value}' " + - "but found value '${actualPropertyValue.toString()}'" + "but found value '$actualPropertyValue'" } val propertyValue: Any? 
= actualPropertyValue?.run { when (entry.key.propertyType) { - PropertyType.LONG_CLASS -> longValue() - PropertyType.STRING_CLASS -> stringValue() - PropertyType.INTEGER_CLASS -> integerValue() - PropertyType.TOKEN_CLASS -> tokenTypeValue() + PropertyType.LONG_CLASS -> longValue() + PropertyType.STRING_CLASS -> stringValue() + PropertyType.INTEGER_CLASS -> integerValue() + PropertyType.TOKEN_CLASS -> tokenTypeValue() PropertyType.ION_VALUE_CLASS -> ionValue() } } @@ -139,21 +139,23 @@ abstract class TestBase : Assert() { val defaultRewriter = AstRewriterBase() protected fun anyToExprValue(value: Any) = when (value) { - is String -> valueFactory.newString(value) - is Int -> valueFactory.newInt(value) + is String -> valueFactory.newString(value) + is Int -> valueFactory.newInt(value) is Decimal -> valueFactory.newDecimal(value) is Timestamp -> valueFactory.newTimestamp(value) is LocalDate -> valueFactory.newDate(value) - is Time -> valueFactory.newTime(value) - is Double -> valueFactory.newFloat(value) + is Time -> valueFactory.newTime(value) + is Double -> valueFactory.newFloat(value) is BigDecimal -> valueFactory.newDecimal(value) - else -> + else -> error("Can't convert receiver to ExprValue (please extend this function to support the receiver's data type).") } - inner class AssertExprValue(val exprValue: ExprValue, - val bindingsTransform: Bindings.() -> Bindings = { this }, - val message: String? = null) { + inner class AssertExprValue( + val exprValue: ExprValue, + val bindingsTransform: Bindings.() -> Bindings = { this }, + val message: String? = null + ) { fun assertBindings(predicate: Bindings.() -> Boolean) = assertTrue( exprValue.bindings.bindingsTransform().predicate() @@ -168,12 +170,12 @@ abstract class TestBase : Assert() { } } - protected fun assertBaseRewrite(originalSql: String, exprNode: ExprNode) { val clonedAst = defaultRewriter.rewriteExprNode(exprNode) assertEquals( "AST returned from default AstRewriterBase should match the original AST. SQL was: $originalSql", - exprNode, clonedAst) + exprNode, clonedAst + ) } protected fun assertSexpEquals( @@ -181,11 +183,11 @@ abstract class TestBase : Assert() { actualValue: IonValue, message: String = "" ) { - if(!expectedValue.equals(actualValue)) { + if (!expectedValue.equals(actualValue)) { fail( "Expected and actual values do not match: $message\n" + - "Expected:\n${SexpAstPrettyPrinter.format(expectedValue)}\n" + - "Actual:\n${SexpAstPrettyPrinter.format(actualValue)}" + "Expected:\n${SexpAstPrettyPrinter.format(expectedValue)}\n" + + "Actual:\n${SexpAstPrettyPrinter.format(actualValue)}" ) } } @@ -194,14 +196,12 @@ abstract class TestBase : Assert() { * Asserts that the specified [block] throws an [SqlException] and its [expectedErrorCode] matches the expected value. */ protected fun assertThrowsSqlException(expectedErrorCode: ErrorCode, block: () -> Unit) { - try { + try { block() fail("Expected EvaluationException but there was no Exception") - } - catch (e: SqlException) { + } catch (e: SqlException) { assertEquals("The expected error code did not match the actual error code", expectedErrorCode, e.errorCode) - } - catch (e: Exception) { + } catch (e: Exception) { fail("Expected EvaluationException but a different exception was thrown \n\t $e") } } @@ -210,20 +210,20 @@ abstract class TestBase : Assert() { * Asserts that the specified [block] throws an [EvaluationException] and its [errorCode] and * [expectErrorContextValues] match the expected values. 
*/ - protected fun assertThrowsEvaluationException(errorCode: ErrorCode? = null, - expectErrorContextValues: Map, - cause: KClass? = null, - block: () -> Unit) { + protected fun assertThrowsEvaluationException( + errorCode: ErrorCode? = null, + expectErrorContextValues: Map, + cause: KClass? = null, + block: () -> Unit + ) { softAssert { try { block() fail("Expected EvaluationException but there was no Exception") - } - catch (e: EvaluationException) { + } catch (e: EvaluationException) { if (cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(cause.java) checkErrorAndErrorContext(errorCode, e, expectErrorContextValues) - } - catch (e: Exception) { + } catch (e: Exception) { fail("Expected EvaluationException but a different exception was thrown \n\t $e") } } diff --git a/lang/test/org/partiql/lang/ast/AstNodeTest.kt b/lang/test/org/partiql/lang/ast/AstNodeTest.kt index 42717286b1..3fa516404e 100644 --- a/lang/test/org/partiql/lang/ast/AstNodeTest.kt +++ b/lang/test/org/partiql/lang/ast/AstNodeTest.kt @@ -4,8 +4,8 @@ import com.amazon.ion.IonSystem import com.amazon.ion.system.IonSystemBuilder import junitparams.JUnitParamsRunner import junitparams.Parameters -import org.junit.Assert.assertTrue import org.junit.Assert.assertEquals +import org.junit.Assert.assertTrue import org.junit.Test import org.junit.runner.RunWith import org.partiql.lang.syntax.SqlParser @@ -75,88 +75,116 @@ class AstNodeTest { fun parametersForIteratorTests() = listOf( IteratorTestCase( "MISSING", - "LiteralMissing"), + "LiteralMissing" + ), IteratorTestCase( "1", - "Literal"), + "Literal" + ), IteratorTestCase( "1 + 1", - "NAry|Literal|Literal"), + "NAry|Literal|Literal" + ), IteratorTestCase( "[1, 2]", - "Seq|Literal|Literal"), + "Seq|Literal|Literal" + ), IteratorTestCase( "{ 'fooField': 1 }", - "Struct|StructField|Literal|Literal"), + "Struct|StructField|Literal|Literal" + ), IteratorTestCase( "a.b.c", - "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal"), + "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal" + ), IteratorTestCase( "a[b].c", - "Path|VariableReference|PathComponentExpr|VariableReference|PathComponentExpr|Literal"), + "Path|VariableReference|PathComponentExpr|VariableReference|PathComponentExpr|Literal" + ), IteratorTestCase( "a[1].c", - "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal"), + "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal" + ), IteratorTestCase( "a[*].c", - "Path|VariableReference|PathComponentWildcard|PathComponentExpr|Literal"), + "Path|VariableReference|PathComponentWildcard|PathComponentExpr|Literal" + ), IteratorTestCase( "a.*.c", - "Path|VariableReference|PathComponentUnpivot|PathComponentExpr|Literal"), + "Path|VariableReference|PathComponentUnpivot|PathComponentExpr|Literal" + ), IteratorTestCase( "fcall(var1, var2)", - "NAry|VariableReference|VariableReference|VariableReference"), + "NAry|VariableReference|VariableReference|VariableReference" + ), IteratorTestCase( "CASE foo WHEN 1 THEN 10 ELSE 11 END", - "SimpleCase|VariableReference|SimpleCaseWhen|Literal|Literal|Literal"), + "SimpleCase|VariableReference|SimpleCaseWhen|Literal|Literal|Literal" + ), IteratorTestCase( "CASE WHEN 1 THEN 10 ELSE 11 END", - "SearchedCase|SearchedCaseWhen|Literal|Literal|Literal"), + "SearchedCase|SearchedCaseWhen|Literal|Literal|Literal" + ), IteratorTestCase( "SELECT * FROM foo", - "Select|SelectProjectionList|SelectListItemStar|FromSourceExpr|VariableReference"), + 
"Select|SelectProjectionList|SelectListItemStar|FromSourceExpr|VariableReference" + ), IteratorTestCase( "SELECT * FROM foo, bar", - //Reminder: this yields the same AST as: ... FROM foo INNER JOIN bar ON true - "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal"), + // Reminder: this yields the same AST as: ... FROM foo INNER JOIN bar ON true + "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal" + ), IteratorTestCase( "SELECT * FROM foo, bar", - "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal"), + "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal" + ), IteratorTestCase( "SELECT * FROM foo WHERE bar", - "Select|SelectProjectionList|SelectListItemStar|FromSourceExpr|VariableReference|VariableReference"), + "Select|SelectProjectionList|SelectListItemStar|FromSourceExpr|VariableReference|VariableReference" + ), IteratorTestCase( "SELECT * FROM foo INNER JOIN bar ON condition", - "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|VariableReference"), + "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|VariableReference" + ), IteratorTestCase( "SELECT f.* FROM foo AS f", - "Select|SelectProjectionList|SelectListItemProjectAll|VariableReference|FromSourceExpr|VariableReference"), + "Select|SelectProjectionList|SelectListItemProjectAll|VariableReference|FromSourceExpr|VariableReference" + ), IteratorTestCase( "SELECT VALUE foo FROM bar", - "Select|SelectProjectionValue|VariableReference|FromSourceExpr|VariableReference"), + "Select|SelectProjectionValue|VariableReference|FromSourceExpr|VariableReference" + ), IteratorTestCase( "PIVOT 1 AT 2 FROM 3", - "Select|SelectProjectionPivot|Literal|Literal|FromSourceExpr|Literal"), + "Select|SelectProjectionPivot|Literal|Literal|FromSourceExpr|Literal" + ), IteratorTestCase( "INSERT INTO foo VALUES (1)", - "DataManipulation|InsertOp|VariableReference|Seq|Seq|Literal|DmlOpList|InsertOp|VariableReference|Seq|Seq|Literal"), + "DataManipulation|InsertOp|VariableReference|Seq|Seq|Literal|DmlOpList|InsertOp|VariableReference|Seq|Seq|Literal" + ), IteratorTestCase( "UPDATE foo SET x.y = bar WHERE n", - "DataManipulation|AssignmentOp|Assignment|Path|VariableReference|PathComponentExpr|Literal|VariableReference|FromSourceExpr|VariableReference|VariableReference|DmlOpList|AssignmentOp|Assignment|Path|VariableReference|PathComponentExpr|Literal|VariableReference"), + "DataManipulation|AssignmentOp|Assignment|Path|VariableReference|PathComponentExpr|Literal|VariableReference|FromSourceExpr|VariableReference|VariableReference|DmlOpList|AssignmentOp|Assignment|Path|VariableReference|PathComponentExpr|Literal|VariableReference" + ), IteratorTestCase( "FROM x IN Y REMOVE p", - "DataManipulation|RemoveOp|VariableReference|FromSourceExpr|NAry|VariableReference|VariableReference|DmlOpList|RemoveOp|VariableReference"), + "DataManipulation|RemoveOp|VariableReference|FromSourceExpr|NAry|VariableReference|VariableReference|DmlOpList|RemoveOp|VariableReference" + ), IteratorTestCase( "DELETE FROM foo WHERE bar", - 
"DataManipulation|DeleteOp|FromSourceExpr|VariableReference|VariableReference|DmlOpList|DeleteOp"), + "DataManipulation|DeleteOp|FromSourceExpr|VariableReference|VariableReference|DmlOpList|DeleteOp" + ), IteratorTestCase( "CREATE TABLE foo", - "CreateTable"), + "CreateTable" + ), IteratorTestCase( "DROP TABLE foo", - "DropTable"), + "DropTable" + ), - IteratorTestCase("MISSING", "LiteralMissing")) + IteratorTestCase("MISSING", "LiteralMissing") + ) @Test fun nodeWithMultipleNonLeafChildren() { @@ -179,10 +207,14 @@ class AstNodeTest { fun literalMissingChildren() = assertTrue(LiteralMissing(emptyMeta).children.isEmpty()) @Test - fun variableReferenceChildren() = assertTrue(VariableReference("", - CaseSensitivity.INSENSITIVE, - ScopeQualifier.LEXICAL, - emptyMeta).children.isEmpty()) + fun variableReferenceChildren() = assertTrue( + VariableReference( + "", + CaseSensitivity.INSENSITIVE, + ScopeQualifier.LEXICAL, + emptyMeta + ).children.isEmpty() + ) @Test fun nAryChildren() { @@ -213,8 +245,10 @@ class AstNodeTest { val component1 = PathComponentExpr(literal("2"), CaseSensitivity.INSENSITIVE, emptyMetaContainer) val component2 = PathComponentExpr(literal("3"), CaseSensitivity.INSENSITIVE, emptyMetaContainer) - assertEquals(listOf(root, component1, component2), - Path(root, listOf(component1, component2), emptyMeta).children) + assertEquals( + listOf(root, component1, component2), + Path(root, listOf(component1, component2), emptyMeta).children + ) } @Test @@ -222,8 +256,10 @@ class AstNodeTest { val value = literal("1") val whenClause1 = SimpleCaseWhen(literal("21"), literal("22")) val whenClause2 = SimpleCaseWhen(literal("31"), literal("32")) - assertEquals(listOf(value, whenClause1, whenClause2), - SimpleCase(value, listOf(whenClause1, whenClause2), null, emptyMeta).children) + assertEquals( + listOf(value, whenClause1, whenClause2), + SimpleCase(value, listOf(whenClause1, whenClause2), null, emptyMeta).children + ) } @Test @@ -232,8 +268,10 @@ class AstNodeTest { val whenClause1 = SimpleCaseWhen(literal("21"), literal("22")) val whenClause2 = SimpleCaseWhen(literal("31"), literal("32")) val elseExpr = literal("4") - assertEquals(listOf(value, whenClause1, whenClause2, elseExpr), - SimpleCase(value, listOf(whenClause1, whenClause2), elseExpr, emptyMeta).children) + assertEquals( + listOf(value, whenClause1, whenClause2, elseExpr), + SimpleCase(value, listOf(whenClause1, whenClause2), elseExpr, emptyMeta).children + ) } @Test @@ -249,8 +287,10 @@ class AstNodeTest { val searchedCaseWhen1 = SearchedCaseWhen(literal("11"), literal("12")) val searchedCaseWhen2 = SearchedCaseWhen(literal("21"), literal("22")) - assertEquals(listOf(searchedCaseWhen1, searchedCaseWhen2), - SearchedCase(listOf(searchedCaseWhen1, searchedCaseWhen2), null, emptyMeta).children) + assertEquals( + listOf(searchedCaseWhen1, searchedCaseWhen2), + SearchedCase(listOf(searchedCaseWhen1, searchedCaseWhen2), null, emptyMeta).children + ) } @Test @@ -259,8 +299,10 @@ class AstNodeTest { val searchedCaseWhen2 = SearchedCaseWhen(literal("21"), literal("22")) val elseExpr = literal("3") - assertEquals(listOf(searchedCaseWhen1, searchedCaseWhen2, elseExpr), - SearchedCase(listOf(searchedCaseWhen1, searchedCaseWhen2), elseExpr, emptyMeta).children) + assertEquals( + listOf(searchedCaseWhen1, searchedCaseWhen2, elseExpr), + SearchedCase(listOf(searchedCaseWhen1, searchedCaseWhen2), elseExpr, emptyMeta).children + ) } @Test @@ -276,8 +318,10 @@ class AstNodeTest { val projection = SelectProjectionValue(literal("1"), 
emptyMetaContainer) val from = FromSourceExpr(literal("2"), LetVariables()) - assertEquals(listOf(projection, from), - Select(SetQuantifier.ALL, projection, from, null, null, null, null, null, null, null, emptyMeta).children) + assertEquals( + listOf(projection, from), + Select(SetQuantifier.ALL, projection, from, null, null, null, null, null, null, null, emptyMeta).children + ) } @Test @@ -292,8 +336,10 @@ class AstNodeTest { val limit = literal("5") val offset = literal("6") - assertEquals(listOf(projection, from, fromLet, where, groupBy, having, orderBy, limit, offset), - Select(SetQuantifier.ALL, projection, from, fromLet, where, groupBy, having, orderBy, limit, offset, emptyMeta).children) + assertEquals( + listOf(projection, from, fromLet, where, groupBy, having, orderBy, limit, offset), + Select(SetQuantifier.ALL, projection, from, fromLet, where, groupBy, having, orderBy, limit, offset, emptyMeta).children + ) } @Test @@ -363,8 +409,10 @@ class AstNodeTest { val child2 = FromSourceExpr(literal("2"), LetVariables()) val child3 = literal("3") - assertEquals(listOf(child1, child2, child3), - FromSourceJoin(JoinOp.INNER, child1, child2, child3, emptyMeta).children) + assertEquals( + listOf(child1, child2, child3), + FromSourceJoin(JoinOp.INNER, child1, child2, child3, emptyMeta).children + ) } @Test diff --git a/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt b/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt index 5277a7ced7..d1e6a63988 100644 --- a/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt +++ b/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt @@ -29,4 +29,4 @@ class IsIonLiteralMetaTest { val roundTrippedIonLiteral = ionLiteral.toAstStatement().toExprNode(ion) Assert.assertTrue(roundTrippedIonLiteral.metas.hasMeta(IsIonLiteralMeta.TAG)) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt b/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt index 05a5f4ae1f..3fe338e5d0 100644 --- a/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt +++ b/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt @@ -62,7 +62,7 @@ class PathComponentExprTest { @Test @Parameters fun equivalenceTest(tc: TestCase) { - when(tc.shouldBeEquivalent) { + when (tc.shouldBeEquivalent) { true -> { assertTrue(tc.a.equals(tc.b), "a must equal b") assertTrue(tc.b.equals(tc.a), "b must equal a") @@ -74,8 +74,10 @@ class PathComponentExprTest { // .hashCode() uniqueness is not guaranteed so this assertion might fail problem someday. // however, the odds of this should be extremely low if `.hashCode()` is implemented effectively. - assertNotEquals(tc.a.hashCode(), tc.b.hashCode(), - "a.hashCode() must not equal b.hashCode(), most likely.") + assertNotEquals( + tc.a.hashCode(), tc.b.hashCode(), + "a.hashCode() must not equal b.hashCode(), most likely." + ) } } } @@ -117,5 +119,4 @@ class PathComponentExprTest { TestCase(fooSensitive, oneInsensitive, false), TestCase(oneSensitive, oneInsensitive, false) ) - } diff --git a/lang/test/org/partiql/lang/ast/SerializationRoundTripTests.kt b/lang/test/org/partiql/lang/ast/SerializationRoundTripTests.kt index 9ba3b52307..8d06b631fd 100644 --- a/lang/test/org/partiql/lang/ast/SerializationRoundTripTests.kt +++ b/lang/test/org/partiql/lang/ast/SerializationRoundTripTests.kt @@ -25,7 +25,8 @@ class SerializationRoundTripTests { failingTestNames = hashSetOf( // CAN_CAST is not supported by V0 and will never be. 
"canCastAsFloat1" - )) + ) + ) // we really don't need to test failure cases in this case since the (de)serializers are legacy. .filter { !it.expectFailure } .map { it.toExprNodeTestCase() } @@ -41,9 +42,8 @@ class SerializationRoundTripTests { deserializer.deserialize(sexp, AstVersion.V0) } - val originalStripped = MetaStrippingRewriter.stripMetas(tc.expr); + val originalStripped = MetaStrippingRewriter.stripMetas(tc.expr) val roundTrippedStripped = MetaStrippingRewriter.stripMetas(roundTrippedExprNode) assertEquals(originalStripped, roundTrippedStripped, "ExprNode deserialized from s-exp V0 AST must match the ExprNode") } } - diff --git a/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt b/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt index 16cb03dc1b..d2453acc0b 100644 --- a/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt +++ b/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt @@ -32,4 +32,4 @@ class SourceLocationMetaTest { val result = dg.first() assertEquals(expected, result) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt b/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt index 379608a502..21e8e0dd0a 100644 --- a/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt +++ b/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt @@ -42,7 +42,8 @@ class VariableReferenceTest { fun caseSensitiveEquals() { assertEquals( sensitiveFoo, - sensitiveFoo.copy()) + sensitiveFoo.copy() + ) } @Test @@ -69,4 +70,4 @@ class VariableReferenceTest { assertNotEquals(insensitiveFoo, insensitiveFoo.copy(scopeQualifier = ScopeQualifier.LEXICAL)) assertNotEquals(insensitiveFoo, insensitiveFoo.copy(metas = metaContainerOf(SourceLocationMeta(1, 1)))) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/ast/passes/AstRewriterBaseTest.kt b/lang/test/org/partiql/lang/ast/passes/AstRewriterBaseTest.kt index 2f2774182a..32f117209f 100644 --- a/lang/test/org/partiql/lang/ast/passes/AstRewriterBaseTest.kt +++ b/lang/test/org/partiql/lang/ast/passes/AstRewriterBaseTest.kt @@ -23,4 +23,3 @@ class AstRewriterBaseTest { tc.assertEquals(rewritten) } } - diff --git a/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt b/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt index 3fb11fffed..e8d9ab93b9 100644 --- a/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt +++ b/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt @@ -15,7 +15,6 @@ package org.partiql.lang.ast.passes import com.amazon.ion.system.IonSystemBuilder - import junitparams.JUnitParamsRunner import junitparams.Parameters import org.junit.Test @@ -39,7 +38,7 @@ import kotlin.test.assertEquals class AstWalkerTests { // these tests are duplicated on AstNodeTest but kept here until we delete AstWalker - + companion object { /** A dummy visitor that simply appends the type name of each node to a StringBuilder to that @@ -104,71 +103,93 @@ class AstWalkerTests { fun parametersForWalkerTest() = listOf( WalkerTestCase( "MISSING", - "LiteralMissing|"), + "LiteralMissing|" + ), WalkerTestCase( "1", - "Literal|"), + "Literal|" + ), WalkerTestCase( "1 + 1", - "NAry|Literal|Literal|"), + "NAry|Literal|Literal|" + ), WalkerTestCase( "[1, 2]", - "Seq|Literal|Literal|"), + "Seq|Literal|Literal|" + ), WalkerTestCase( "{ 'fooField': 1 }", - "Struct|Literal|Literal|"), + "Struct|Literal|Literal|" + ), WalkerTestCase( "a.b.c", - "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal|"), + 
"Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal|" + ), WalkerTestCase( "a[b].c", - "Path|VariableReference|PathComponentExpr|VariableReference|PathComponentExpr|Literal|"), + "Path|VariableReference|PathComponentExpr|VariableReference|PathComponentExpr|Literal|" + ), WalkerTestCase( "a[1].c", - "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal|"), + "Path|VariableReference|PathComponentExpr|Literal|PathComponentExpr|Literal|" + ), WalkerTestCase( "a[*].c", - "Path|VariableReference|PathComponentWildcard|PathComponentExpr|Literal|"), + "Path|VariableReference|PathComponentWildcard|PathComponentExpr|Literal|" + ), WalkerTestCase( "a.*.c", - "Path|VariableReference|PathComponentUnpivot|PathComponentExpr|Literal|"), + "Path|VariableReference|PathComponentUnpivot|PathComponentExpr|Literal|" + ), WalkerTestCase( "fcall(var1, var2)", - "NAry|VariableReference|VariableReference|VariableReference|"), + "NAry|VariableReference|VariableReference|VariableReference|" + ), WalkerTestCase( "CASE foo WHEN 1 THEN 10 ELSE 11 END", - "SimpleCase|VariableReference|Literal|Literal|Literal|"), + "SimpleCase|VariableReference|Literal|Literal|Literal|" + ), WalkerTestCase( "CASE WHEN 1 THEN 10 ELSE 11 END", - "SearchedCase|Literal|Literal|Literal|"), + "SearchedCase|Literal|Literal|Literal|" + ), WalkerTestCase( "SELECT * FROM foo", - "Select|SelectProjectionList|SelectListItemStar|FromSourceExpr|VariableReference|"), + "Select|SelectProjectionList|SelectListItemStar|FromSourceExpr|VariableReference|" + ), WalkerTestCase( "SELECT * FROM foo, bar", - //Reminder: this yields the same AST as: ... FROM foo INNER JOIN bar ON true - "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal|"), + // Reminder: this yields the same AST as: ... 
FROM foo INNER JOIN bar ON true + "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal|" + ), WalkerTestCase( "SELECT * FROM foo, bar", - "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal|"), + "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|Literal|" + ), WalkerTestCase( "SELECT * FROM foo INNER JOIN bar ON condition", - "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|VariableReference|"), + "Select|SelectProjectionList|SelectListItemStar|FromSourceJoin|FromSourceExpr|VariableReference|FromSourceExpr|VariableReference|VariableReference|" + ), WalkerTestCase( "SELECT f.* FROM foo AS f", - "Select|SelectProjectionList|SelectListItemProjectAll|VariableReference|FromSourceExpr|VariableReference|"), + "Select|SelectProjectionList|SelectListItemProjectAll|VariableReference|FromSourceExpr|VariableReference|" + ), WalkerTestCase( "SELECT VALUE foo FROM bar", - "Select|SelectProjectionValue|VariableReference|FromSourceExpr|VariableReference|"), + "Select|SelectProjectionValue|VariableReference|FromSourceExpr|VariableReference|" + ), WalkerTestCase( "PIVOT 1 AT 2 FROM 3", - "Select|SelectProjectionPivot|Literal|Literal|FromSourceExpr|Literal|"), + "Select|SelectProjectionPivot|Literal|Literal|FromSourceExpr|Literal|" + ), WalkerTestCase( "CREATE TABLE FOO", - "CreateTable|"), + "CreateTable|" + ), WalkerTestCase( "?", - "Parameter|"), + "Parameter|" + ), WalkerTestCase("MISSING", "LiteralMissing|"), @@ -176,5 +197,4 @@ class AstWalkerTests { WalkerTestCase("INSERT INTO foo VALUE 1 ON CONFLICT WHERE bar DO NOTHING", "DataManipulation|VariableReference|Literal|VariableReference|") ) - -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt b/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt index 313d251204..3af1a469cc 100644 --- a/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt +++ b/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt @@ -42,7 +42,8 @@ abstract class RewriterTestBase : SqlParserTestBase() { assertEquals( "The second pass of ${rewriter.javaClass.name} pass should be idempotent", MetaStrippingRewriter.stripMetas(actualExprNode), - MetaStrippingRewriter.stripMetas(anotherActualExprNode)) + MetaStrippingRewriter.stripMetas(anotherActualExprNode) + ) } /** @@ -60,9 +61,11 @@ abstract class RewriterTestBase : SqlParserTestBase() { val expectedExprNode = MetaStrippingRewriter.stripMetas(super.parser.parseExprNode(tc.expectedSql)) - val actualExprNode = MetaStrippingRewriter.stripMetas(rewriters.fold(originalExprNode) { expr, rewriter -> - rewriter.rewriteExprNode(expr) - }) + val actualExprNode = MetaStrippingRewriter.stripMetas( + rewriters.fold(originalExprNode) { expr, rewriter -> + rewriter.rewriteExprNode(expr) + } + ) assertEquals("The expected AST must match the rewritten AST", expectedExprNode, actualExprNode) } diff --git a/lang/test/org/partiql/lang/ast/passes/StatementRedactorTest.kt b/lang/test/org/partiql/lang/ast/passes/StatementRedactorTest.kt index f2627cd370..763e768a71 100644 --- a/lang/test/org/partiql/lang/ast/passes/StatementRedactorTest.kt +++ b/lang/test/org/partiql/lang/ast/passes/StatementRedactorTest.kt @@ -21,7 +21,7 @@ class StatementRedactorTest : 
SqlParserTestBase() { * 1. Which arguments are needed for [SafeFieldName] validation * 2. Which arguments are returned for redaction */ - private fun validateFuncContainsAndBeginsWith(args : List ) : List { + private fun validateFuncContainsAndBeginsWith(args: List): List { val path = args[0] val value = args[1] val argsToRedact = mutableListOf() @@ -59,254 +59,328 @@ class StatementRedactorTest : SqlParserTestBase() { // Typed RedactionTestCase( "SELECT * FROM tb WHERE hk IS MISSING AND attr IS MISSING", - "SELECT * FROM tb WHERE hk IS MISSING AND attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE hk IS MISSING AND attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk IS NOT MISSING AND attr IS NOT MISSING", - "SELECT * FROM tb WHERE hk IS NOT MISSING AND attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE hk IS NOT MISSING AND attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS MISSING", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NUMERIC", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT NUMERIC", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS STRING", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT STRING", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS TUPLE", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT TUPLE", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS STRUCT", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT STRUCT", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS BOOLEAN", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT BOOLEAN", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS LIST", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT LIST", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS BLOB", - "SELECT * FROM tb WHERE attr IS ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT BLOB", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NULL", - "SELECT * FROM tb WHERE attr IS 
***(Redacted)"), + "SELECT * FROM tb WHERE attr IS ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE attr IS NOT NULL", - "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)"), + "SELECT * FROM tb WHERE attr IS NOT ***(Redacted)" + ), // Call and Nested Call RedactionTestCase( "SELECT * FROM tb WHERE hk = 1 AND begins_with(Attr, 'foo') AND begins_with(hk, 'foo')", - "SELECT * FROM tb WHERE hk = 1 AND begins_with(Attr, ***(Redacted)) AND begins_with(hk, 'foo')"), + "SELECT * FROM tb WHERE hk = 1 AND begins_with(Attr, ***(Redacted)) AND begins_with(hk, 'foo')" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 1 AND contains(Attr, 'foo') AND contains(hk, 'foo')", - "SELECT * FROM tb WHERE hk = 1 AND contains(Attr, ***(Redacted)) AND contains(hk, 'foo')"), + "SELECT * FROM tb WHERE hk = 1 AND contains(Attr, ***(Redacted)) AND contains(hk, 'foo')" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 1 AND arbitrary_udf(Attr, 'foo', arbitrary_udf(hk, 'foo'))", - "SELECT * FROM tb WHERE hk = 1 AND arbitrary_udf(Attr, ***(Redacted), arbitrary_udf(hk, ***(Redacted)))"), + "SELECT * FROM tb WHERE hk = 1 AND arbitrary_udf(Attr, ***(Redacted), arbitrary_udf(hk, ***(Redacted)))" + ), // Unary operator RedactionTestCase( "SELECT * FROM tb WHERE hk IS MISSING AND attr = - 'literal'", - "SELECT * FROM tb WHERE hk IS MISSING AND attr = - ***(Redacted)"), + "SELECT * FROM tb WHERE hk IS MISSING AND attr = - ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk IS MISSING AND attr = -+ -+ -+ non_literal", - "SELECT * FROM tb WHERE hk IS MISSING AND attr = -+ -+ -+ non_literal"), + "SELECT * FROM tb WHERE hk IS MISSING AND attr = -+ -+ -+ non_literal" + ), // Arithmetic operators are not redacted RedactionTestCase( "SELECT * FROM tb WHERE hk = 012-34-5678 AND ssn = 012-34-5678", - "SELECT * FROM tb WHERE hk = 012-34-5678 AND ssn = ***(Redacted)-***(Redacted)-***(Redacted)"), + "SELECT * FROM tb WHERE hk = 012-34-5678 AND ssn = ***(Redacted)-***(Redacted)-***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 012-34-5678 AND ssn = 012*34*5678", - "SELECT * FROM tb WHERE hk = 012-34-5678 AND ssn = ***(Redacted)****(Redacted)****(Redacted)"), + "SELECT * FROM tb WHERE hk = 012-34-5678 AND ssn = ***(Redacted)****(Redacted)****(Redacted)" + ), // Concat operator RedactionTestCase( "SELECT * FROM tb WHERE hk = 'abc' || '123' AND ssn = 'abc' || '123' || 'xyz'", - "SELECT * FROM tb WHERE hk = 'abc' || '123' AND ssn = ***(Redacted) || ***(Redacted) || ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 'abc' || '123' AND ssn = ***(Redacted) || ***(Redacted) || ***(Redacted)" + ), // In operator RedactionTestCase( "SELECT * FROM tb WHERE hk IN (1, 3, 5) AND attr IN (2, 4, 6)", - "SELECT * FROM tb WHERE hk IN (1, 3, 5) AND attr IN (***(Redacted), ***(Redacted), ***(Redacted))"), + "SELECT * FROM tb WHERE hk IN (1, 3, 5) AND attr IN (***(Redacted), ***(Redacted), ***(Redacted))" + ), // Between operator RedactionTestCase( "SELECT * FROM tb WHERE hk BETWEEN 1 AND 2 AND attr BETWEEN 1 AND 2", - "SELECT * FROM tb WHERE hk BETWEEN 1 AND 2 AND attr BETWEEN ***(Redacted) AND ***(Redacted)"), + "SELECT * FROM tb WHERE hk BETWEEN 1 AND 2 AND attr BETWEEN ***(Redacted) AND ***(Redacted)" + ), // Comparison operators RedactionTestCase( "SELECT * FROM tb WHERE (hk <> 1 or hk < 1 or hk <= 1 or hk > 1 or hk >=1 )", - "SELECT * FROM tb WHERE (hk <> 1 or hk < 1 or hk <= 1 or hk > 1 or hk >=1 )"), + "SELECT * FROM tb WHERE (hk <> 1 or hk < 1 or hk <= 1 or hk > 1 or hk >=1 )" + ), RedactionTestCase( 
"SELECT * FROM tb WHERE hk = 'a' and (attr1 <> 1 or attr2 < 1 or attr3 <= 1 or attr4 > 1 or attr5 >=1 )", - "SELECT * FROM tb WHERE hk = 'a' and (attr1 <> ***(Redacted) or attr2 < ***(Redacted) or attr3 <= ***(Redacted) or attr4 > ***(Redacted) or attr5 >=***(Redacted) )"), + "SELECT * FROM tb WHERE hk = 'a' and (attr1 <> ***(Redacted) or attr2 < ***(Redacted) or attr3 <= ***(Redacted) or attr4 > ***(Redacted) or attr5 >=***(Redacted) )" + ), // String RedactionTestCase( "SELECT * FROM tb WHERE hk = 'a' and attr = 'a'", - "SELECT * FROM tb WHERE hk = 'a' and attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 'a' and attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = '2016-02-15' and attr = '2016-02-15'", - "SELECT * FROM tb WHERE hk = '2016-02-15' and attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = '2016-02-15' and attr = ***(Redacted)" + ), // TODO: Fix metadata length for unicode // RedactionTestCase( // "SELECT * FROM tb WHERE hk = '\uD83D\uDE01\uD83D\uDE1E\uD83D\uDE38\uD83D\uDE38' and attr = '\uD83D\uDE01\uD83D\uDE1E\uD83D\uDE38\uD83D\uDE38'", // "SELECT * FROM tb WHERE hk = '\uD83D\uDE01\uD83D\uDE1E\uD83D\uDE38\uD83D\uDE38' and attr = ***(Redacted)"), RedactionTestCase( "SELECT * FROM tb WHERE hk = '話家身圧費谷料村能計税金' and attr = '話家身圧費谷料村能計税金'", - "SELECT * FROM tb WHERE hk = '話家身圧費谷料村能計税金' and attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = '話家身圧費谷料村能計税金' and attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 'abcde\\u0832fgh' and attr = 'abcde\\u0832fgh'", - "SELECT * FROM tb WHERE hk = 'abcde\\u0832fgh' and attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 'abcde\\u0832fgh' and attr = ***(Redacted)" + ), // Int RedactionTestCase( "SELECT * FROM tb WHERE NOT hk = 1 AND NOT attr = 1", - "SELECT * FROM tb WHERE NOT hk = 1 AND NOT attr = ***(Redacted)"), + "SELECT * FROM tb WHERE NOT hk = 1 AND NOT attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE NOT hk = 0 AND NOT attr = 0", - "SELECT * FROM tb WHERE NOT hk = 0 AND NOT attr = ***(Redacted)"), + "SELECT * FROM tb WHERE NOT hk = 0 AND NOT attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = -0 AND attr = -0", - "SELECT * FROM tb WHERE hk = -0 AND attr = -***(Redacted)"), + "SELECT * FROM tb WHERE hk = -0 AND attr = -***(Redacted)" + ), // Decimal RedactionTestCase( "SELECT * FROM tb WHERE hk = 0.123 AND attr = 0.123", - "SELECT * FROM tb WHERE hk = 0.123 AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0.123 AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = -0.123 AND attr = -0.123", - "SELECT * FROM tb WHERE hk = -0.123 AND attr = -***(Redacted)"), + "SELECT * FROM tb WHERE hk = -0.123 AND attr = -***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 0.000 AND attr = 0.000", - "SELECT * FROM tb WHERE hk = 0.000 AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0.000 AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = -0.000 AND attr = -0.000", - "SELECT * FROM tb WHERE hk = -0.000 AND attr = -***(Redacted)"), + "SELECT * FROM tb WHERE hk = -0.000 AND attr = -***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 0.12e-4 AND attr = 0.12e-4", - "SELECT * FROM tb WHERE hk = 0.12e-4 AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0.12e-4 AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = -0.01200e5 AND attr = -0.01200e5", - "SELECT * FROM tb WHERE hk = -0.01200e5 AND attr = -***(Redacted)"), + 
"SELECT * FROM tb WHERE hk = -0.01200e5 AND attr = -***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 0. AND attr = 0.", - "SELECT * FROM tb WHERE hk = 0. AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0. AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 0E0 AND attr = 0E0", - "SELECT * FROM tb WHERE hk = 0E0 AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0E0 AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 0E-0 AND attr = 0E-0", - "SELECT * FROM tb WHERE hk = 0E-0 AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0E-0 AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 0.0E1 AND attr = 0.0E1", - "SELECT * FROM tb WHERE hk = 0.0E1 AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = 0.0E1 AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = -0E0 AND attr = -0E0", - "SELECT * FROM tb WHERE hk = -0E0 AND attr = -***(Redacted)"), + "SELECT * FROM tb WHERE hk = -0E0 AND attr = -***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = -0. AND attr = -0.", - "SELECT * FROM tb WHERE hk = -0. AND attr = -***(Redacted)"), + "SELECT * FROM tb WHERE hk = -0. AND attr = -***(Redacted)" + ), // Boolean on non-key attr RedactionTestCase( "SELECT * FROM tb WHERE hk = TRUE AND attr = TRUE", - "SELECT * FROM tb WHERE hk = TRUE AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = TRUE AND attr = ***(Redacted)" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = FALSE AND attr = FALSE", - "SELECT * FROM tb WHERE hk = FALSE AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = FALSE AND attr = ***(Redacted)" + ), // NULL on non-key attr RedactionTestCase( "SELECT * FROM tb WHERE hk = NULL AND attr = NULL", - "SELECT * FROM tb WHERE hk = NULL AND attr = ***(Redacted)"), + "SELECT * FROM tb WHERE hk = NULL AND attr = ***(Redacted)" + ), // Map on non-key attr RedactionTestCase( "SELECT * FROM tb WHERE hk = 'a' AND attr = { 'hk' : 'value' }", - "SELECT * FROM tb WHERE hk = 'a' AND attr = { ***(Redacted) : ***(Redacted) }"), + "SELECT * FROM tb WHERE hk = 'a' AND attr = { ***(Redacted) : ***(Redacted) }" + ), // List RedactionTestCase( "SELECT * FROM tb WHERE hk = 'a' AND attr = [ 'value1', 'value2' ]", - "SELECT * FROM tb WHERE hk = 'a' AND attr = [ ***(Redacted), ***(Redacted) ]"), + "SELECT * FROM tb WHERE hk = 'a' AND attr = [ ***(Redacted), ***(Redacted) ]" + ), // Set RedactionTestCase( "SELECT * FROM tb WHERE hk = 'a' AND attr = << 'value1', 'value2' >>", - "SELECT * FROM tb WHERE hk = 'a' AND attr = << ***(Redacted), ***(Redacted) >>"), + "SELECT * FROM tb WHERE hk = 'a' AND attr = << ***(Redacted), ***(Redacted) >>" + ), // Space and new line preserved RedactionTestCase( "SELECT * FROM tb WHERE hk =1 \n AND attr =1", "SELECT * FROM tb WHERE hk =1 \n" + - " AND attr =***(Redacted)"), + " AND attr =***(Redacted)" + ), RedactionTestCase( "SELECT * \n FROM tb \n WHERE hk =1 \n\r AND attr = 'asdfas \n\r df \n\r sa' AND hk = 2", - "SELECT * \n FROM tb \n WHERE hk =1 \n\r AND attr = ***(Redacted) AND hk = 2"), + "SELECT * \n FROM tb \n WHERE hk =1 \n\r AND attr = ***(Redacted) AND hk = 2" + ), // Multiple Args cases RedactionTestCase( "SELECT * FROM tb WHERE hk = 1 and rk = '1'", - "SELECT * FROM tb WHERE hk = 1 and rk = '1'"), + "SELECT * FROM tb WHERE hk = 1 and rk = '1'" + ), RedactionTestCase( "SELECT * FROM tb WHERE hk = 'a' and (rk = 1 or (rk = 2 and attr = '1'))", - "SELECT * FROM tb WHERE hk = 'a' and (rk = 1 or 
(rk = 2 and attr = ***(Redacted)))"), + "SELECT * FROM tb WHERE hk = 'a' and (rk = 1 or (rk = 2 and attr = ***(Redacted)))" + ), // Nested Map RedactionTestCase( "SELECT * FROM tb WHERE hk = 'a' AND attr = { 'name' : { 'name' : 'value' } }", - "SELECT * FROM tb WHERE hk = 'a' AND attr = { ***(Redacted) : { ***(Redacted) : ***(Redacted) } }"), + "SELECT * FROM tb WHERE hk = 'a' AND attr = { ***(Redacted) : { ***(Redacted) : ***(Redacted) } }" + ), // Insert Into RedactionTestCase( "INSERT INTO tb VALUE { 'hk': 'a', 'rk': 1, 'attr': 'b' }", - "INSERT INTO tb VALUE { 'hk': 'a', 'rk': 1, 'attr': ***(Redacted) }"), + "INSERT INTO tb VALUE { 'hk': 'a', 'rk': 1, 'attr': ***(Redacted) }" + ), RedactionTestCase( "INSERT INTO tb VALUE { 'hk': 'a', 'rk': 1, 'attr': { 'hk': 'a' }}", - "INSERT INTO tb VALUE { 'hk': 'a', 'rk': 1, 'attr': { ***(Redacted): ***(Redacted) }}"), + "INSERT INTO tb VALUE { 'hk': 'a', 'rk': 1, 'attr': { ***(Redacted): ***(Redacted) }}" + ), RedactionTestCase( "INSERT INTO tb VALUE `{ 'hk': 'a', 'rk': 1, 'attr': { 'hk': 'a' }}`", - "INSERT INTO tb VALUE ***(Redacted)"), + "INSERT INTO tb VALUE ***(Redacted)" + ), RedactionTestCase( "INSERT INTO tb VALUE HK", - "INSERT INTO tb VALUE HK"), + "INSERT INTO tb VALUE HK" + ), RedactionTestCase( "INSERT INTO tb VALUE hk", - "INSERT INTO tb VALUE hk"), + "INSERT INTO tb VALUE hk" + ), RedactionTestCase( "INSERT INTO tb VALUE hk = 'a' and attr = 'a'", - "INSERT INTO tb VALUE hk = 'a' and attr = ***(Redacted)"), + "INSERT INTO tb VALUE hk = 'a' and attr = ***(Redacted)" + ), RedactionTestCase( "INSERT INTO tb VALUE MISSING", - "INSERT INTO tb VALUE MISSING"), + "INSERT INTO tb VALUE MISSING" + ), RedactionTestCase( "INSERT INTO tb VALUE << 'value1', 'value2' >>", - "INSERT INTO tb VALUE << ***(Redacted), ***(Redacted) >>"), + "INSERT INTO tb VALUE << ***(Redacted), ***(Redacted) >>" + ), // Update Assignment RedactionTestCase( "update nonExistentTable set foo = 'bar' where attr1='testValue'", - "update nonExistentTable set foo = ***(Redacted) where attr1=***(Redacted)"), + "update nonExistentTable set foo = ***(Redacted) where attr1=***(Redacted)" + ), RedactionTestCase( "UPDATE tb SET hk = 'b', rk = 2, attr = 2 WHERE hk = 'a' and rk = 1 and attr = 1", - "UPDATE tb SET hk = 'b', rk = 2, attr = ***(Redacted) WHERE hk = 'a' and rk = 1 and attr = ***(Redacted)"), + "UPDATE tb SET hk = 'b', rk = 2, attr = ***(Redacted) WHERE hk = 'a' and rk = 1 and attr = ***(Redacted)" + ), // Delete RedactionTestCase( "DELETE FROM tb WHERE hk = 'a' AND attr = 'b'", - "DELETE FROM tb WHERE hk = 'a' AND attr = ***(Redacted)"), + "DELETE FROM tb WHERE hk = 'a' AND attr = ***(Redacted)" + ), // Path RedactionTestCase( "SELECT ?, tb.a from tb where tb.hk = ? and tb.rk = 'eggs' and tb[*].bar = ? or tb[*].rk = 'foo'", - "SELECT ?, tb.a from tb where tb.hk = ? and tb.rk = 'eggs' and tb[*].bar = ? or tb[*].rk = 'foo'"), + "SELECT ?, tb.a from tb where tb.hk = ? and tb.rk = 'eggs' and tb[*].bar = ? or tb[*].rk = 'foo'" + ), // Parameter RedactionTestCase( "SELECT ?, tb.a from tb where ? = 'foo' and ? = ? and ?.rk = 'eggs' and ?[*].bar = ? or ?[*].rk = 'foo'", - "SELECT ?, tb.a from tb where ? = 'foo' and ? = ? and ?.rk = 'eggs' and ?[*].bar = ? or ?[*].rk = 'foo'") + "SELECT ?, tb.a from tb where ? = 'foo' and ? = ? and ?.rk = 'eggs' and ?[*].bar = ? 
or ?[*].rk = 'foo'" + ) ) @Test diff --git a/lang/test/org/partiql/lang/ast/passes/inference/StaticTypeCastTests.kt b/lang/test/org/partiql/lang/ast/passes/inference/StaticTypeCastTests.kt index 1314126014..f5d20bdb99 100644 --- a/lang/test/org/partiql/lang/ast/passes/inference/StaticTypeCastTests.kt +++ b/lang/test/org/partiql/lang/ast/passes/inference/StaticTypeCastTests.kt @@ -96,7 +96,7 @@ class StaticTypeCastTests { private val numberOrMissingType = StaticType.unionOf(StaticType.MISSING, numberType) private val numberOrUnknownType = StaticType.unionOf(StaticType.MISSING, StaticType.NULL, numberType) - fun List.addCastToAnyCases(): List = this + this.map{ + fun List.addCastToAnyCases(): List = this + this.map { it.copy( targetType = StaticType.ANY, expectedType = it.sourceType @@ -435,8 +435,8 @@ class StaticTypeCastTests { val unconstrainedInt = IntType(IntType.IntRangeConstraint.UNCONSTRAINED) val decimal4_2 = DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(4, 2)) - val decimal7_2 = DecimalType( DecimalType.PrecisionScaleConstraint.Constrained(7, 2)) - val decimal32_0 = DecimalType( DecimalType.PrecisionScaleConstraint.Constrained(32, 0)) + val decimal7_2 = DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(7, 2)) + val decimal32_0 = DecimalType(DecimalType.PrecisionScaleConstraint.Constrained(32, 0)) return listOf( TestCase(smallint, smallint, smallint), @@ -474,11 +474,10 @@ class StaticTypeCastTests { TestCase(decimal32_0, bigint, StaticType.unionOf(StaticType.MISSING, bigint)), TestCase(decimal32_0, unconstrainedInt, unconstrainedInt), - TestCase(StaticType.FLOAT, smallint, StaticType.unionOf(StaticType.MISSING, smallint)), TestCase(decimal4_2, smallint, smallint), TestCase(smallint, decimal7_2, decimal7_2) ).addCastToAnyCases() } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/domains/PartiqlAstToExprNodeRoundTripTests.kt b/lang/test/org/partiql/lang/domains/PartiqlAstToExprNodeRoundTripTests.kt index 75fdd1894b..ff7edb51d2 100644 --- a/lang/test/org/partiql/lang/domains/PartiqlAstToExprNodeRoundTripTests.kt +++ b/lang/test/org/partiql/lang/domains/PartiqlAstToExprNodeRoundTripTests.kt @@ -22,4 +22,3 @@ class PartiqlAstToExprNodeRoundTripTests { assertEquals(stripped, roundTrippedExprNode) } } - diff --git a/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt b/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt index 310884570f..b7b442c453 100644 --- a/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt +++ b/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt @@ -32,9 +32,11 @@ class LexerErrorsTest : TestBase() { else -> "'${String(Character.toChars(codePoint))}' [U+${Integer.toHexString(codePoint)}]" } - private fun checkInputThrowingLexerException(input: String, - errorCode: ErrorCode, - expectErrorContextValues: Map) { + private fun checkInputThrowingLexerException( + input: String, + errorCode: ErrorCode, + expectErrorContextValues: Map + ) { try { lexer.tokenize(input) fail("Expected LexerException but there was no Exception") @@ -49,31 +51,40 @@ class LexerErrorsTest : TestBase() { @Test fun testInvalidChar() { - checkInputThrowingLexerException("©", + checkInputThrowingLexerException( + "©", ErrorCode.LEXER_INVALID_CHAR, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 1L, - Property.TOKEN_STRING to representation("©".codePointAt(0)))) + Property.TOKEN_STRING to representation("©".codePointAt(0)) + ) + ) } @Test fun testInvalidOperator() { - checkInputThrowingLexerException("10 ^ 4", + 
checkInputThrowingLexerException( + "10 ^ 4", ErrorCode.LEXER_INVALID_OPERATOR, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 5L, - Property.TOKEN_STRING to "^")) + Property.TOKEN_STRING to "^" + ) + ) } @Test fun testInvalidIonLiteral() { - checkInputThrowingLexerException("`{I am not a list}`", - ErrorCode.LEXER_INVALID_ION_LITERAL, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 20L, - Property.TOKEN_STRING to "{I am not a list}")) + checkInputThrowingLexerException( + "`{I am not a list}`", + ErrorCode.LEXER_INVALID_ION_LITERAL, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 20L, + Property.TOKEN_STRING to "{I am not a list}" + ) + ) } - } +} diff --git a/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt b/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt index 97849acf33..597eb5f903 100644 --- a/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt +++ b/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt @@ -14,93 +14,107 @@ package org.partiql.lang.errors import com.amazon.ion.Timestamp -import org.junit.Ignore -import org.partiql.lang.syntax.TokenType import org.junit.Test import org.partiql.lang.syntax.SqlParserTestBase +import org.partiql.lang.syntax.TokenType import org.partiql.lang.util.sourceLocationProperties class ParserErrorsTest : SqlParserTestBase() { - fun emptyQuery() = checkInputThrowingParserException("", + fun emptyQuery() = checkInputThrowingParserException( + "", ErrorCode.PARSE_UNEXPECTED_TERM, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 1L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) - + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun expectedKeyword() { - checkInputThrowingParserException("5 BETWEEN 1 10", + checkInputThrowingParserException( + "5 BETWEEN 1 10", ErrorCode.PARSE_EXPECTED_KEYWORD, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.KEYWORD to "AND", Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(10))) + Property.TOKEN_VALUE to ion.newInt(10) + ) + ) } @Test fun expectedTypeName() { - checkInputThrowingParserException("NULL is `null`", + checkInputThrowingParserException( + "NULL is `null`", ErrorCode.PARSE_EXPECTED_TYPE_NAME, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.ION_LITERAL, - Property.TOKEN_VALUE to ion.newNull())) - + Property.TOKEN_VALUE to ion.newNull() + ) + ) } @Test fun expectedIdentAfterAT() { - checkInputThrowingParserException("@", + checkInputThrowingParserException( + "@", ErrorCode.PARSE_MISSING_IDENT_AFTER_AT, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 1L, Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("@"))) - + Property.TOKEN_VALUE to ion.newSymbol("@") + ) + ) } @Test fun expectedExpectedTypeName() { - checkInputThrowingParserException("a is 'missing'", + checkInputThrowingParserException( + "a is 'missing'", ErrorCode.PARSE_EXPECTED_TYPE_NAME, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("missing"))) - + Property.TOKEN_VALUE to ion.newString("missing") + ) + ) } @Test fun expectedUnexpectedToken() { - checkInputThrowingParserException("SELECT ord, val FROM table1 AT ord AS val", + checkInputThrowingParserException( + "SELECT ord, val FROM table1 AT ord AS val", ErrorCode.PARSE_UNEXPECTED_TOKEN, mapOf( Property.LINE_NUMBER to 
1L, Property.COLUMN_NUMBER to 36L, Property.TOKEN_TYPE to TokenType.AS, - Property.TOKEN_VALUE to ion.newSymbol("as"))) - + Property.TOKEN_VALUE to ion.newSymbol("as") + ) + ) } @Test fun expectedUnexpectedKeyword() { - checkInputThrowingParserException("SELECT FROM table1", + checkInputThrowingParserException( + "SELECT FROM table1", ErrorCode.PARSE_UNEXPECTED_KEYWORD, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("from"))) - + Property.TOKEN_VALUE to ion.newSymbol("from") + ) + ) } @Test @@ -111,7 +125,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 30L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("from"))) + Property.TOKEN_VALUE to ion.newSymbol("from") + ) + ) @Test fun unexpectedKeywordUpdateInSelectList() = checkInputThrowingParserException( @@ -121,35 +137,42 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 11L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("update"))) + Property.TOKEN_VALUE to ion.newSymbol("update") + ) + ) @Test fun expectedInvalidPathComponent() { - checkInputThrowingParserException("x...a", + checkInputThrowingParserException( + "x...a", ErrorCode.PARSE_INVALID_PATH_COMPONENT, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 3L, Property.TOKEN_TYPE to TokenType.DOT, - Property.TOKEN_VALUE to ion.newSymbol("."))) - + Property.TOKEN_VALUE to ion.newSymbol(".") + ) + ) } @Test fun expectedInvalidPathComponentForKeyword() { - checkInputThrowingParserException("""SELECT foo.id, foo.table FROM `[{id: 1, table: "foos"}]` AS foo""", - ErrorCode.PARSE_INVALID_PATH_COMPONENT, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 20L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("table"))) - + checkInputThrowingParserException( + """SELECT foo.id, foo.table FROM `[{id: 1, table: "foos"}]` AS foo""", + ErrorCode.PARSE_INVALID_PATH_COMPONENT, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 20L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("table") + ) + ) } @Test fun expectedCastAsIntArity() { - checkInputThrowingParserException("CAST(5 AS INTEGER(10))", + checkInputThrowingParserException( + "CAST(5 AS INTEGER(10))", ErrorCode.PARSE_CAST_ARITY, mapOf( Property.LINE_NUMBER to 1L, @@ -158,13 +181,15 @@ class ParserErrorsTest : SqlParserTestBase() { Property.EXPECTED_ARITY_MIN to 0, Property.EXPECTED_ARITY_MAX to 0, Property.CAST_TO to "integer", - Property.TOKEN_VALUE to ion.newSymbol("("))) - + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) } @Test fun expectedCastAsRealArity() { - checkInputThrowingParserException("CAST(5 AS REAL(10))", + checkInputThrowingParserException( + "CAST(5 AS REAL(10))", ErrorCode.PARSE_CAST_ARITY, mapOf( Property.LINE_NUMBER to 1L, @@ -173,1300 +198,1685 @@ class ParserErrorsTest : SqlParserTestBase() { Property.EXPECTED_ARITY_MIN to 0, Property.EXPECTED_ARITY_MAX to 0, Property.CAST_TO to "real", - Property.TOKEN_VALUE to ion.newSymbol("("))) + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) } @Test fun expectedInvalidTypeParameter() { - checkInputThrowingParserException("CAST(5 AS VARCHAR(a))", + checkInputThrowingParserException( + "CAST(5 AS VARCHAR(a))", ErrorCode.PARSE_INVALID_TYPE_PARAM, mapOf( Property.LINE_NUMBER to 1L, 
Property.COLUMN_NUMBER to 11L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("character_varying"))) - + Property.TOKEN_VALUE to ion.newSymbol("character_varying") + ) + ) } @Test fun castToVarCharToTooBigLength() { - checkInputThrowingParserException("CAST(5 AS VARCHAR(2147483648))", + checkInputThrowingParserException( + "CAST(5 AS VARCHAR(2147483648))", ErrorCode.PARSE_TYPE_PARAMETER_EXCEEDED_MAXIMUM_VALUE, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 19L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2147483648L))) + Property.TOKEN_VALUE to ion.newInt(2147483648L) + ) + ) } @Test fun castToDecimalToTooBigLength_1() { - checkInputThrowingParserException("CAST(5 AS DECIMAL(2147483648))", + checkInputThrowingParserException( + "CAST(5 AS DECIMAL(2147483648))", ErrorCode.PARSE_TYPE_PARAMETER_EXCEEDED_MAXIMUM_VALUE, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 19L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2147483648L))) + Property.TOKEN_VALUE to ion.newInt(2147483648L) + ) + ) } @Test fun castToDecimalToTooBigLength_2() { - checkInputThrowingParserException("CAST(5 AS DECIMAL(1, 2147483648))", + checkInputThrowingParserException( + "CAST(5 AS DECIMAL(1, 2147483648))", ErrorCode.PARSE_TYPE_PARAMETER_EXCEEDED_MAXIMUM_VALUE, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 22L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2147483648L))) - + Property.TOKEN_VALUE to ion.newInt(2147483648L) + ) + ) } @Test fun castToNumericToTooBigLength_1() { - checkInputThrowingParserException("CAST(5 AS NUMERIC(2147483648))", + checkInputThrowingParserException( + "CAST(5 AS NUMERIC(2147483648))", ErrorCode.PARSE_TYPE_PARAMETER_EXCEEDED_MAXIMUM_VALUE, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 19L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2147483648L))) + Property.TOKEN_VALUE to ion.newInt(2147483648L) + ) + ) } @Test fun castToNumericToTooBigLength_2() { - checkInputThrowingParserException("CAST(5 AS NUMERIC(1, 2147483648))", + checkInputThrowingParserException( + "CAST(5 AS NUMERIC(1, 2147483648))", ErrorCode.PARSE_TYPE_PARAMETER_EXCEEDED_MAXIMUM_VALUE, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 22L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2147483648L))) - + Property.TOKEN_VALUE to ion.newInt(2147483648L) + ) + ) } @Test fun expectedExpectedWhenClause() { - checkInputThrowingParserException("CASE name ELSE 1 END", + checkInputThrowingParserException( + "CASE name ELSE 1 END", ErrorCode.PARSE_EXPECTED_WHEN_CLAUSE, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 11L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("else"))) - + Property.TOKEN_VALUE to ion.newSymbol("else") + ) + ) } @Test fun expectedUnexpectedOperator() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE LIKE a b", + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE LIKE a b", ErrorCode.PARSE_UNEXPECTED_OPERATOR, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 29L, Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("like"))) - + Property.TOKEN_VALUE to ion.newSymbol("like") + ) + ) } @Test fun expectedExpression() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE a LIKE b ESCAPE", + 
checkInputThrowingParserException( + "SELECT a, b FROM data WHERE a LIKE b ESCAPE", ErrorCode.PARSE_EXPECTED_EXPRESSION, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 38L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("escape"))) - + Property.TOKEN_VALUE to ion.newSymbol("escape") + ) + ) } @Test fun expectedExpressionTernaryOperator() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE a LIKE", + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE a LIKE", ErrorCode.PARSE_EXPECTED_EXPRESSION, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 31L, Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("like"))) - + Property.TOKEN_VALUE to ion.newSymbol("like") + ) + ) } @Test fun expectedTokenType() { - checkInputThrowingParserException("(1 + 2", + checkInputThrowingParserException( + "(1 + 2", ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, mapOf( Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN, Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 7L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) - + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) } @Test fun expectedCastMissingLeftParen() { - checkInputThrowingParserException("CAST 5 as integer", + checkInputThrowingParserException( + "CAST 5 as integer", ErrorCode.PARSE_EXPECTED_LEFT_PAREN_AFTER_CAST, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(5))) - + Property.TOKEN_VALUE to ion.newInt(5) + ) + ) } @Test fun expectedLeftParenValueConstructor() { - checkInputThrowingParserException("values 1,2)", + checkInputThrowingParserException( + "values 1,2)", ErrorCode.PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) - + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) } @Test fun expectedUnexpectedTerm() { - checkInputThrowingParserException("select () from data", + checkInputThrowingParserException( + "select () from data", ErrorCode.PARSE_UNEXPECTED_TERM, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol(")"))) - + Property.TOKEN_VALUE to ion.newSymbol(")") + ) + ) } @Test fun expectedSelectMissingFrom() { - checkInputThrowingParserException("select a data", + checkInputThrowingParserException( + "select a data", ErrorCode.PARSE_SELECT_MISSING_FROM, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) - + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) } @Test fun expectedUnsupportedLiteralsGroupBy() { - checkInputThrowingParserException("select a from data group by 1", - ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + checkInputThrowingParserException( + "select a from data group by 1", + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) } @Test fun expectedAsForLet() { - checkInputThrowingParserException("SELECT a FROM foo LET bar b", 
+ checkInputThrowingParserException( + "SELECT a FROM foo LET bar b", ErrorCode.PARSE_EXPECTED_AS_FOR_LET, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 27L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("b"))) + Property.TOKEN_VALUE to ion.newSymbol("b") + ) + ) } @Test fun expectedIdentForAlias() { - checkInputThrowingParserException("select a as true from data", + checkInputThrowingParserException( + "select a as true from data", ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 13L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newBool(true))) - + Property.TOKEN_VALUE to ion.newBool(true) + ) + ) } @Test fun expectedIdentForAt() { - checkInputThrowingParserException("select a from data at true", + checkInputThrowingParserException( + "select a from data at true", ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 23L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newBool(true))) - + Property.TOKEN_VALUE to ion.newBool(true) + ) + ) } @Test fun expectedIdentForAliasLet() { - checkInputThrowingParserException("SELECT a FROM foo LET bar AS", + checkInputThrowingParserException( + "SELECT a FROM foo LET bar AS", ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 29L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) } @Test fun substringMissingLeftParen() { - //12345678901234567890123456789 - checkInputThrowingParserException("select substring from 'asdf' for 1) FROM foo", - ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 18L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("from"))) + // 12345678901234567890123456789 + checkInputThrowingParserException( + "select substring from 'asdf' for 1) FROM foo", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 18L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("from") + ) + ) } @Test fun substringMissingFromOrComma() { - //12345678901234567890123456789 - checkInputThrowingParserException("select substring('str' 1) from foo", - ErrorCode.PARSE_EXPECTED_ARGUMENT_DELIMITER, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 24L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + // 12345678901234567890123456789 + checkInputThrowingParserException( + "select substring('str' 1) from foo", + ErrorCode.PARSE_EXPECTED_ARGUMENT_DELIMITER, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 24L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) } @Test fun substringSql92WithoutLengthMissingRightParen() { - //123456789012345678901234567890123456789 - checkInputThrowingParserException("select substring('str' from 1 from foo ", - ErrorCode.PARSE_EXPECTED_2_TOKEN_TYPES, - mapOf( - Property.LINE_NUMBER to 1L, - Property.EXPECTED_TOKEN_TYPE_1_OF_2 to TokenType.FOR, - Property.EXPECTED_TOKEN_TYPE_2_OF_2 to TokenType.RIGHT_PAREN, - Property.COLUMN_NUMBER to 31L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("from"))) + // 
123456789012345678901234567890123456789
+        checkInputThrowingParserException(
+            "select substring('str' from 1 from foo ",
+            ErrorCode.PARSE_EXPECTED_2_TOKEN_TYPES,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.EXPECTED_TOKEN_TYPE_1_OF_2 to TokenType.FOR,
+                Property.EXPECTED_TOKEN_TYPE_2_OF_2 to TokenType.RIGHT_PAREN,
+                Property.COLUMN_NUMBER to 31L,
+                Property.TOKEN_TYPE to TokenType.KEYWORD,
+                Property.TOKEN_VALUE to ion.newSymbol("from")
+            )
+        )
     }
 
     @Test
     fun substringSql92WithLengthMissingRightParen() {
-        //123456789012345678901234567890123456789
-        checkInputThrowingParserException("select substring('str' from 1 for 1 from foo ",
-                                          ErrorCode.PARSE_EXPECTED_TOKEN_TYPE,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 37L,
-                                              Property.TOKEN_TYPE to TokenType.KEYWORD,
-                                              Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol("from")))
+        // 123456789012345678901234567890123456789
+        checkInputThrowingParserException(
+            "select substring('str' from 1 for 1 from foo ",
+            ErrorCode.PARSE_EXPECTED_TOKEN_TYPE,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 37L,
+                Property.TOKEN_TYPE to TokenType.KEYWORD,
+                Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol("from")
+            )
+        )
     }
 
     @Test
     fun substringWithoutLengthMissingRightParen() {
-        //123456789012345678901234567890123456789
-        checkInputThrowingParserException("select substring('str', 1 from foo ",
-                                          ErrorCode.PARSE_EXPECTED_2_TOKEN_TYPES,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 27L,
-                                              Property.TOKEN_TYPE to TokenType.KEYWORD,
-                                              Property.EXPECTED_TOKEN_TYPE_1_OF_2 to TokenType.COMMA,
-                                              Property.EXPECTED_TOKEN_TYPE_2_OF_2 to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol("from")))
+        // 123456789012345678901234567890123456789
+        checkInputThrowingParserException(
+            "select substring('str', 1 from foo ",
+            ErrorCode.PARSE_EXPECTED_2_TOKEN_TYPES,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 27L,
+                Property.TOKEN_TYPE to TokenType.KEYWORD,
+                Property.EXPECTED_TOKEN_TYPE_1_OF_2 to TokenType.COMMA,
+                Property.EXPECTED_TOKEN_TYPE_2_OF_2 to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol("from")
+            )
+        )
     }
 
     @Test
     fun substringMissingRightParen() {
-        //123456789012345678901234567890123456789
-        checkInputThrowingParserException("select substring('str', 1, 1 from foo ",
-                                          ErrorCode.PARSE_EXPECTED_TOKEN_TYPE,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 30L,
-                                              Property.TOKEN_TYPE to TokenType.KEYWORD,
-                                              Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol("from")))
-
+        // 123456789012345678901234567890123456789
+        checkInputThrowingParserException(
+            "select substring('str', 1, 1 from foo ",
+            ErrorCode.PARSE_EXPECTED_TOKEN_TYPE,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 30L,
+                Property.TOKEN_TYPE to TokenType.KEYWORD,
+                Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol("from")
+            )
+        )
     }
 
     @Test
     fun callTrimNoLeftParen() {
-        checkInputThrowingParserException("trim ' ')",
-                                          ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 6L,
-                                              Property.TOKEN_TYPE to TokenType.LITERAL,
-                                              Property.TOKEN_VALUE to ion.newString(" ")))
+        checkInputThrowingParserException(
+            "trim ' ')",
+            ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 6L,
+                Property.TOKEN_TYPE to TokenType.LITERAL,
+                Property.TOKEN_VALUE to ion.newString(" ")
+            )
+        )
     }
 
     @Test
     fun callTrimNoRightParen() {
-        checkInputThrowingParserException("trim (' '",
-                                          ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 10L,
-                                              Property.TOKEN_TYPE to TokenType.EOF,
-                                              Property.TOKEN_VALUE to ion.newSymbol("EOF")))
+        checkInputThrowingParserException(
+            "trim (' '",
+            ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 10L,
+                Property.TOKEN_TYPE to TokenType.EOF,
+                Property.TOKEN_VALUE to ion.newSymbol("EOF")
+            )
+        )
     }
 
     @Test
     fun callTrimFourArguments() {
-        checkInputThrowingParserException("trim(both ' ' from 'test' 2)",
-                                          ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 27L,
-                                              Property.TOKEN_TYPE to TokenType.LITERAL,
-                                              Property.TOKEN_VALUE to ion.newInt(2)))
+        checkInputThrowingParserException(
+            "trim(both ' ' from 'test' 2)",
+            ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 27L,
+                Property.TOKEN_TYPE to TokenType.LITERAL,
+                Property.TOKEN_VALUE to ion.newInt(2)
+            )
+        )
     }
 
     @Test
     fun callTrimSpecificationWithoutFrom() {
-        checkInputThrowingParserException("trim(both 'test')",
-                                          ErrorCode.PARSE_EXPECTED_KEYWORD,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 17L,
-                                              Property.KEYWORD to "FROM",
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim(both 'test')",
+            ErrorCode.PARSE_EXPECTED_KEYWORD,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 17L,
+                Property.KEYWORD to "FROM",
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun callTrimSpecificationAndRemoveWithoutFrom() {
-        checkInputThrowingParserException("trim(both '' 'test')",
-                                          ErrorCode.PARSE_EXPECTED_KEYWORD,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 14L,
-                                              Property.TOKEN_TYPE to TokenType.LITERAL,
-                                              Property.KEYWORD to "FROM",
-                                              Property.TOKEN_VALUE to ion.newString("test")))
+        checkInputThrowingParserException(
+            "trim(both '' 'test')",
+            ErrorCode.PARSE_EXPECTED_KEYWORD,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 14L,
+                Property.TOKEN_TYPE to TokenType.LITERAL,
+                Property.KEYWORD to "FROM",
+                Property.TOKEN_VALUE to ion.newString("test")
+            )
+        )
     }
 
     @Test
     fun callTrimWithoutString() {
-        checkInputThrowingParserException("trim(from)",
-                                          ErrorCode.PARSE_UNEXPECTED_TERM,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 10L,
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim(from)",
+            ErrorCode.PARSE_UNEXPECTED_TERM,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 10L,
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun callTrimNoArgs() {
-        checkInputThrowingParserException("trim()",
-                                          ErrorCode.PARSE_UNEXPECTED_TERM,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 6L,
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim()",
+            ErrorCode.PARSE_UNEXPECTED_TERM,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 6L,
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun callTrimSpecificationMissingFrom() {
-        checkInputThrowingParserException("trim(trailing '')",
-                                          ErrorCode.PARSE_EXPECTED_KEYWORD,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 17L,
-                                              Property.KEYWORD to "FROM",
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim(trailing '')",
+            ErrorCode.PARSE_EXPECTED_KEYWORD,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 17L,
+                Property.KEYWORD to "FROM",
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun callTrimZeroArguments() {
-        checkInputThrowingParserException("trim()",
-                                          ErrorCode.PARSE_UNEXPECTED_TERM,
-                                          mapOf(Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 6L,
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim()",
+            ErrorCode.PARSE_UNEXPECTED_TERM,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 6L,
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun callTrimAllButString() {
-        checkInputThrowingParserException("trim(trailing '' from)",
-                                          ErrorCode.PARSE_UNEXPECTED_TERM,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 22L,
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim(trailing '' from)",
+            ErrorCode.PARSE_UNEXPECTED_TERM,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 22L,
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun callTwoArgumentsNoFrom() {
-        checkInputThrowingParserException("trim(' ' ' 1 ')",
-                                          ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 10L,
-                                              Property.TOKEN_TYPE to TokenType.LITERAL,
-                                              Property.TOKEN_VALUE to ion.newString(" 1 ")))
+        checkInputThrowingParserException(
+            "trim(' ' ' 1 ')",
+            ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 10L,
+                Property.TOKEN_TYPE to TokenType.LITERAL,
+                Property.TOKEN_VALUE to ion.newString(" 1 ")
+            )
+        )
     }
 
     @Test
     fun callTrimSpecificationAndFromMissingString() {
-        checkInputThrowingParserException("trim(trailing from)",
-                                          ErrorCode.PARSE_UNEXPECTED_TERM,
-                                          mapOf(
-                                              Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 19L,
-                                              Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
-                                              Property.TOKEN_VALUE to ion.newSymbol(")")))
+        checkInputThrowingParserException(
+            "trim(trailing from)",
+            ErrorCode.PARSE_UNEXPECTED_TERM,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+                Property.COLUMN_NUMBER to 19L,
+                Property.TOKEN_TYPE to TokenType.RIGHT_PAREN,
+                Property.TOKEN_VALUE to ion.newSymbol(")")
+            )
+        )
     }
 
     @Test
     fun nullIsNotNullIonLiteral() {
-        checkInputThrowingParserException("NULL is not `null`",
-                                          ErrorCode.PARSE_EXPECTED_TYPE_NAME,
-                                          mapOf(Property.LINE_NUMBER to 1L,
-                                              Property.COLUMN_NUMBER to 13L,
-                                              Property.TOKEN_TYPE to TokenType.ION_LITERAL,
-                                              Property.TOKEN_VALUE to ion.newNull()))
+        checkInputThrowingParserException(
+            "NULL is not `null`",
+            ErrorCode.PARSE_EXPECTED_TYPE_NAME,
+            mapOf(
+                Property.LINE_NUMBER to 1L,
+
Property.COLUMN_NUMBER to 13L, + Property.TOKEN_TYPE to TokenType.ION_LITERAL, + Property.TOKEN_VALUE to ion.newNull() + ) + ) } @Test fun idIsNotStringLiteral() { - checkInputThrowingParserException("a is not 'missing'", - ErrorCode.PARSE_EXPECTED_TYPE_NAME, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 10L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("missing"))) + checkInputThrowingParserException( + "a is not 'missing'", + ErrorCode.PARSE_EXPECTED_TYPE_NAME, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 10L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newString("missing") + ) + ) } @Test fun idIsNotGroupMissing() { - checkInputThrowingParserException("a is not (missing)", - ErrorCode.PARSE_EXPECTED_TYPE_NAME , - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 10L, - Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + checkInputThrowingParserException( + "a is not (missing)", + ErrorCode.PARSE_EXPECTED_TYPE_NAME, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 10L, + Property.TOKEN_TYPE to TokenType.LEFT_PAREN, + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) } @Test fun aggregateWithNoArgs() { - checkInputThrowingParserException("SUM()", - ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 5L, - Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol(")"))) + checkInputThrowingParserException( + "SUM()", + ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 5L, + Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, + Property.TOKEN_VALUE to ion.newSymbol(")") + ) + ) } @Test fun aggregateWithTooManyArgs() { - checkInputThrowingParserException("SUM(a, b)", - ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 5L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("a"))) + checkInputThrowingParserException( + "SUM(a, b)", + ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 5L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("a") + ) + ) } @Test fun aggregateWithWildcardOnNonCount() { - checkInputThrowingParserException("SUM(*)", - ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 5L, - Property.TOKEN_TYPE to TokenType.STAR, - Property.TOKEN_VALUE to ion.newSymbol("*"))) + checkInputThrowingParserException( + "SUM(*)", + ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 5L, + Property.TOKEN_TYPE to TokenType.STAR, + Property.TOKEN_VALUE to ion.newSymbol("*") + ) + ) } @Test fun aggregateWithWildcardOnNonCountNonAggregate() { - checkInputThrowingParserException("F(*)", - ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 3L, - Property.TOKEN_TYPE to TokenType.STAR, - Property.TOKEN_VALUE to ion.newSymbol("*"))) + checkInputThrowingParserException( + "F(*)", + ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 3L, + Property.TOKEN_TYPE to TokenType.STAR, + Property.TOKEN_VALUE to ion.newSymbol("*") + ) + ) } @Test fun 
castTooManyArgs() { - checkInputThrowingParserException("CAST(5 AS INTEGER(10))", - ErrorCode.PARSE_CAST_ARITY, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 18L, - Property.EXPECTED_ARITY_MIN to 0, // kinda funny - Property.EXPECTED_ARITY_MAX to 0, - Property.CAST_TO to "integer", - Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + checkInputThrowingParserException( + "CAST(5 AS INTEGER(10))", + ErrorCode.PARSE_CAST_ARITY, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 18L, + Property.EXPECTED_ARITY_MIN to 0, // kinda funny + Property.EXPECTED_ARITY_MAX to 0, + Property.CAST_TO to "integer", + Property.TOKEN_TYPE to TokenType.LEFT_PAREN, + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) } @Test fun castNonLiteralArg() { - checkInputThrowingParserException("CAST(5 AS VARCHAR(a))", - ErrorCode.PARSE_INVALID_TYPE_PARAM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 11L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("character_varying"))) + checkInputThrowingParserException( + "CAST(5 AS VARCHAR(a))", + ErrorCode.PARSE_INVALID_TYPE_PARAM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 11L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("character_varying") + ) + ) } @Test fun castNegativeArg() { - checkInputThrowingParserException("CAST(5 AS VARCHAR(-1))", - ErrorCode.PARSE_INVALID_TYPE_PARAM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 11L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("character_varying"))) + checkInputThrowingParserException( + "CAST(5 AS VARCHAR(-1))", + ErrorCode.PARSE_INVALID_TYPE_PARAM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 11L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("character_varying") + ) + ) } @Test fun castNonTypArg() { - checkInputThrowingParserException("CAST(5 AS SELECT)", - ErrorCode.PARSE_EXPECTED_TYPE_NAME, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 11L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("select"))) + checkInputThrowingParserException( + "CAST(5 AS SELECT)", + ErrorCode.PARSE_EXPECTED_TYPE_NAME, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 11L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("select") + ) + ) } @Test fun caseOnlyEnd() { - checkInputThrowingParserException("CASE END", - ErrorCode.PARSE_UNEXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 6L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("end"))) + checkInputThrowingParserException( + "CASE END", + ErrorCode.PARSE_UNEXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 6L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("end") + ) + ) } @Test fun searchedCaseNoWhenWithElse() { - checkInputThrowingParserException("CASE ELSE 1 END", - ErrorCode.PARSE_UNEXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 6L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("else"))) + checkInputThrowingParserException( + "CASE ELSE 1 END", + ErrorCode.PARSE_UNEXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 6L, + 
Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("else") + ) + ) } @Test fun simpleCaseNoWhenWithElse() { - checkInputThrowingParserException("CASE name ELSE 1 END", - ErrorCode.PARSE_EXPECTED_WHEN_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 11L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("else"))) + checkInputThrowingParserException( + "CASE name ELSE 1 END", + ErrorCode.PARSE_EXPECTED_WHEN_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 11L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("else") + ) + ) } @Test fun groupByOrdinal() { - checkInputThrowingParserException("SELECT a FROM data GROUP BY 1", - ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + checkInputThrowingParserException( + "SELECT a FROM data GROUP BY 1", + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) } @Test fun groupByOutOfBoundsOrdinal() { // looks the same as the previous one - checkInputThrowingParserException("SELECT a FROM data GROUP BY 2", - ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2))) + checkInputThrowingParserException( + "SELECT a FROM data GROUP BY 2", + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(2) + ) + ) } @Test fun groupByBadOrdinal() { - checkInputThrowingParserException("SELECT a FROM data GROUP BY -1", // looks duplicate - ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 30L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(-1))) + checkInputThrowingParserException( + "SELECT a FROM data GROUP BY -1", // looks duplicate + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 30L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(-1) + ) + ) } @Test fun groupByStringConstantOrdinal() { - checkInputThrowingParserException("SELECT a FROM data GROUP BY 'a'", - ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("a"))) + checkInputThrowingParserException( + "SELECT a FROM data GROUP BY 'a'", + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newString("a") + ) + ) } @Test fun orderByMissingBYAndSortSpec() { - checkInputThrowingParserException("SELECT a FROM tb ORDER", - ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 23L, - Property.TOKEN_TYPE to TokenType.EOF, - Property.EXPECTED_TOKEN_TYPE to TokenType.BY, - Property.TOKEN_VALUE to ion.newSymbol("EOF")) + checkInputThrowingParserException( + "SELECT a FROM tb 
ORDER", + ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 23L, + Property.TOKEN_TYPE to TokenType.EOF, + Property.EXPECTED_TOKEN_TYPE to TokenType.BY, + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) ) } @Test fun orderByMissingBy() { - checkInputThrowingParserException("SELECT a FROM tb ORDER foo", - ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 24L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.EXPECTED_TOKEN_TYPE to TokenType.BY, - Property.TOKEN_VALUE to ion.newSymbol("foo")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER foo", + ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 24L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.EXPECTED_TOKEN_TYPE to TokenType.BY, + Property.TOKEN_VALUE to ion.newSymbol("foo") + ) ) } @Test fun orderByMissingSortSpec() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 26L, - Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 26L, + Property.TOKEN_TYPE to TokenType.EOF, + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) ) } @Test fun orderByMultipleAttributesInSortSpec() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY foo bar", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 31L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("bar")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY foo bar", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 31L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("bar") + ) ) } @Test fun orderByMultipleEmptyParsedCommaList() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY foo, ,", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 32L, - Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(",")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY foo, ,", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 32L, + Property.TOKEN_TYPE to TokenType.COMMA, + Property.TOKEN_VALUE to ion.newSymbol(",") + ) ) } @Test fun orderByMissingAttributeName() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY asc, bar", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 27L, - Property.TOKEN_TYPE to TokenType.ASC, - Property.TOKEN_VALUE to ion.newSymbol("asc")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY asc, bar", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 27L, + Property.TOKEN_TYPE to TokenType.ASC, + Property.TOKEN_VALUE to ion.newSymbol("asc") + ) ) } @Test fun orderByInvalidPunctuation() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY asc; bar", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 27L, - Property.TOKEN_TYPE to TokenType.ASC, - 
Property.TOKEN_VALUE to ion.newSymbol("asc")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY asc; bar", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 27L, + Property.TOKEN_TYPE to TokenType.ASC, + Property.TOKEN_VALUE to ion.newSymbol("asc") + ) ) } @Test fun orderByMultipleOrderingSpecs() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY foo asc desc", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 35L, - Property.TOKEN_TYPE to TokenType.DESC, - Property.TOKEN_VALUE to ion.newSymbol("desc")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY foo asc desc", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 35L, + Property.TOKEN_TYPE to TokenType.DESC, + Property.TOKEN_VALUE to ion.newSymbol("desc") + ) ) } @Test fun orderByUnexpectedKeywordAsAttribute() { - checkInputThrowingParserException("SELECT a FROM tb ORDER BY SELECT", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 33L, - Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF")) + checkInputThrowingParserException( + "SELECT a FROM tb ORDER BY SELECT", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 33L, + Property.TOKEN_TYPE to TokenType.EOF, + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) ) } @Test fun offsetBeforeLimit() { - checkInputThrowingParserException("SELECT a FROM tb OFFSET 5 LIMIT 10", + checkInputThrowingParserException( + "SELECT a FROM tb OFFSET 5 LIMIT 10", ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 27L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("limit")) + Property.TOKEN_VALUE to ion.newSymbol("limit") + ) ) } @Test fun limitOffsetBeforeOrderBy() { - checkInputThrowingParserException("SELECT a FROM tb LIMIT 10 OFFSET 5 ORDER BY b ASC", + checkInputThrowingParserException( + "SELECT a FROM tb LIMIT 10 OFFSET 5 ORDER BY b ASC", ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 36L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("order")) + Property.TOKEN_VALUE to ion.newSymbol("order") + ) ) } @Test fun offsetMissingArgument() { - checkInputThrowingParserException("SELECT a FROM tb OFFSET", + checkInputThrowingParserException( + "SELECT a FROM tb OFFSET", ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 24L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF")) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) ) } @Test fun offsetUnexpectedKeywordAsAttribute() { - checkInputThrowingParserException("SELECT a FROM tb OFFSET SELECT", + checkInputThrowingParserException( + "SELECT a FROM tb OFFSET SELECT", ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 31L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF")) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) ) } @Test fun onConflictUnexpectedTokenOnConflict() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 ON_CONFLICT WHERE bar DO 
NOTHING", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 25L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("ON_CONFLICT")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 ON_CONFLICT WHERE bar DO NOTHING", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 25L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("ON_CONFLICT") + ) ) } @Test fun onConflictUnexpectedKeywordConflict() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 CONFLICT WHERE bar DO NOTHING", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 25L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("conflict")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 CONFLICT WHERE bar DO NOTHING", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 25L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("conflict") + ) ) } @Test fun onConflictUnexpectedKeywordWhen() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 ON CONFLICT WHEN bar DO NOTHING", - ErrorCode.PARSE_EXPECTED_WHERE_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 37L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("when")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 ON CONFLICT WHEN bar DO NOTHING", + ErrorCode.PARSE_EXPECTED_WHERE_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 37L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("when") + ) ) } @Test fun onConflictMissingOnConflictExpression() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 ON CONFLICT WHERE DO NOTHING", - ErrorCode.PARSE_UNEXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 43L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("do_nothing")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE DO NOTHING", + ErrorCode.PARSE_UNEXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 43L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("do_nothing") + ) ) } @Test fun onConflictMissingConflictAction() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 ON CONFLICT WHERE bar", - ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 37L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("where")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE bar", + ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 37L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("where") + ) ) } @Test fun onConflictInvalidConflictAction() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 ON CONFLICT WHERE bar DO SOMETHING", - ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 37L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("where")) + checkInputThrowingParserException( + "INSERT INTO foo 
VALUE 1 ON CONFLICT WHERE bar DO SOMETHING", + ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 37L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("where") + ) ) } @Test fun atOnConflictUnexpectedTokenOnConflict() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 AT pos ON_CONFLICT WHERE bar DO NOTHING", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 32L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("ON_CONFLICT")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 AT pos ON_CONFLICT WHERE bar DO NOTHING", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 32L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("ON_CONFLICT") + ) ) } @Test fun atOnConflictUnexpectedKeywordConflict() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 AT pos CONFLICT WHERE bar DO NOTHING", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 32L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("conflict")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 AT pos CONFLICT WHERE bar DO NOTHING", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 32L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("conflict") + ) ) } @Test fun atOnConflictUnexpectedKeywordWhen() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHEN bar DO NOTHING", - ErrorCode.PARSE_EXPECTED_WHERE_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 44L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("when")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHEN bar DO NOTHING", + ErrorCode.PARSE_EXPECTED_WHERE_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 44L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("when") + ) ) } @Test fun atOnConflictMissingOnConflictExpression() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHERE DO NOTHING", - ErrorCode.PARSE_UNEXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 50L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("do_nothing")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHERE DO NOTHING", + ErrorCode.PARSE_UNEXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 50L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("do_nothing") + ) ) } @Test fun atOnConflictMissingConflictAction() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHERE bar", - ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 44L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("where")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHERE bar", + ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 44L, + Property.TOKEN_TYPE to 
TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("where") + ) ) } @Test fun atOnConflictInvalidConflictAction() { - checkInputThrowingParserException("INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHERE bar DO SOMETHING", - ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 44L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("where")) + checkInputThrowingParserException( + "INSERT INTO foo VALUE 1 AT pos ON CONFLICT WHERE bar DO SOMETHING", + ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 44L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("where") + ) ) } @Test fun leftOvers() { - checkInputThrowingParserException("5 5", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 3L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(5))) + checkInputThrowingParserException( + "5 5", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 3L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(5) + ) + ) } @Test fun likeColNameLikeColNameEscapeTypo() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE a LIKE b ECSAPE '\\'", - ErrorCode.PARSE_UNEXPECTED_TOKEN , - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 38L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("ECSAPE"))) + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE a LIKE b ECSAPE '\\'", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 38L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("ECSAPE") + ) + ) } @Test fun likeWrongOrderOfArgs() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE LIKE a b", - ErrorCode.PARSE_UNEXPECTED_OPERATOR, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("like"))) + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE LIKE a b", + ErrorCode.PARSE_UNEXPECTED_OPERATOR, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.TOKEN_TYPE to TokenType.OPERATOR, + Property.TOKEN_VALUE to ion.newSymbol("like") + ) + ) } @Test fun likeMissingEscapeValue() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE a LIKE b ESCAPE", - ErrorCode.PARSE_EXPECTED_EXPRESSION, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 38L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("escape"))) + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE a LIKE b ESCAPE", + ErrorCode.PARSE_EXPECTED_EXPRESSION, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 38L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("escape") + ) + ) } @Test fun likeMissingPattern() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE a LIKE", - ErrorCode.PARSE_EXPECTED_EXPRESSION, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 31L, - Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("like"))) + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE a LIKE", + 
ErrorCode.PARSE_EXPECTED_EXPRESSION, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 31L, + Property.TOKEN_TYPE to TokenType.OPERATOR, + Property.TOKEN_VALUE to ion.newSymbol("like") + ) + ) } @Test fun likeEscapeIncorrectOrder() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE ESCAPE '\\' a LIKE b ", - ErrorCode.PARSE_UNEXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("escape"))) + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE ESCAPE '\\' a LIKE b ", + ErrorCode.PARSE_UNEXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("escape") + ) + ) } @Test fun likeEscapeAsSecondArgument() { - checkInputThrowingParserException("SELECT a, b FROM data WHERE a LIKE ESCAPE '\\' b", - ErrorCode.PARSE_UNEXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 36L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("escape"))) + checkInputThrowingParserException( + "SELECT a, b FROM data WHERE a LIKE ESCAPE '\\' b", + ErrorCode.PARSE_UNEXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 36L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("escape") + ) + ) } @Test fun atOperatorOnNonIdentifier() { - checkInputThrowingParserException("@(a)", - ErrorCode.PARSE_MISSING_IDENT_AFTER_AT, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 1L, - Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("@"))) + checkInputThrowingParserException( + "@(a)", + ErrorCode.PARSE_MISSING_IDENT_AFTER_AT, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 1L, + Property.TOKEN_TYPE to TokenType.OPERATOR, + Property.TOKEN_VALUE to ion.newSymbol("@") + ) + ) } @Test fun atOperatorDoubleOnIdentifier() { - checkInputThrowingParserException("@ @a", - ErrorCode.PARSE_MISSING_IDENT_AFTER_AT, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 1L, - Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("@"))) + checkInputThrowingParserException( + "@ @a", + ErrorCode.PARSE_MISSING_IDENT_AFTER_AT, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 1L, + Property.TOKEN_TYPE to TokenType.OPERATOR, + Property.TOKEN_VALUE to ion.newSymbol("@") + ) + ) } @Test fun nullIsNullIonLiteral() { - checkInputThrowingParserException("NULL is `null`", - ErrorCode.PARSE_EXPECTED_TYPE_NAME, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 9L, - Property.TOKEN_TYPE to TokenType.ION_LITERAL, - Property.TOKEN_VALUE to ion.newNull())) + checkInputThrowingParserException( + "NULL is `null`", + ErrorCode.PARSE_EXPECTED_TYPE_NAME, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 9L, + Property.TOKEN_TYPE to TokenType.ION_LITERAL, + Property.TOKEN_VALUE to ion.newNull() + ) + ) } @Test fun idIsStringLiteral() { - checkInputThrowingParserException("a is 'missing'", - ErrorCode.PARSE_EXPECTED_TYPE_NAME, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 6L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("missing"))) + checkInputThrowingParserException( + "a is 'missing'", + ErrorCode.PARSE_EXPECTED_TYPE_NAME, + mapOf( + Property.LINE_NUMBER to 1L, + 
Property.COLUMN_NUMBER to 6L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newString("missing") + ) + ) } @Test fun idIsGroupMissing() { - checkInputThrowingParserException("a is (missing)", - ErrorCode.PARSE_EXPECTED_TYPE_NAME , - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 6L, - Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + checkInputThrowingParserException( + "a is (missing)", + ErrorCode.PARSE_EXPECTED_TYPE_NAME, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 6L, + Property.TOKEN_TYPE to TokenType.LEFT_PAREN, + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) } @Test fun selectWithFromAtAndAs() { - checkInputThrowingParserException("SELECT ord, val FROM table1 AT ord AS val", - ErrorCode.PARSE_UNEXPECTED_TOKEN , - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 36L, - Property.TOKEN_TYPE to TokenType.AS, - Property.TOKEN_VALUE to ion.newSymbol("as"))) + checkInputThrowingParserException( + "SELECT ord, val FROM table1 AT ord AS val", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 36L, + Property.TOKEN_TYPE to TokenType.AS, + Property.TOKEN_VALUE to ion.newSymbol("as") + ) + ) } @Test fun pivotNoAt() { - checkInputThrowingParserException("PIVOT v FROM data", - ErrorCode.PARSE_EXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 9L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.KEYWORD to "AT", - Property.TOKEN_VALUE to ion.newSymbol("from"))) + checkInputThrowingParserException( + "PIVOT v FROM data", + ErrorCode.PARSE_EXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 9L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.KEYWORD to "AT", + Property.TOKEN_VALUE to ion.newSymbol("from") + ) + ) } @Test fun callExtractMissingFrom() { - checkInputThrowingParserException("extract(year b)", - ErrorCode.PARSE_EXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 14L, - Property.KEYWORD to "FROM", - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("b"))) + checkInputThrowingParserException( + "extract(year b)", + ErrorCode.PARSE_EXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 14L, + Property.KEYWORD to "FROM", + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("b") + ) + ) } @Test fun callExtractMissingFromWithComma() { - checkInputThrowingParserException("extract(year, b)", - ErrorCode.PARSE_EXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 13L, - Property.KEYWORD to "FROM", - Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + checkInputThrowingParserException( + "extract(year, b)", + ErrorCode.PARSE_EXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 13L, + Property.KEYWORD to "FROM", + Property.TOKEN_TYPE to TokenType.COMMA, + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) } @Test fun callExtractMissingSecondArgument() { - checkInputThrowingParserException("extract(year from)", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 18L, - Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol(")"))) + checkInputThrowingParserException( + "extract(year from)", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + 
Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 18L, + Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, + Property.TOKEN_VALUE to ion.newSymbol(")") + ) + ) } @Test fun callExtractMissingDateTimePart() { - checkInputThrowingParserException("extract(from b)", - ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 9L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("from"))) + checkInputThrowingParserException( + "extract(from b)", + ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 9L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("from") + ) + ) } @Test fun callExtractOnlySecondArgument() { - checkInputThrowingParserException("extract(b)", - ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 9L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("b"))) + checkInputThrowingParserException( + "extract(b)", + ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 9L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("b") + ) + ) } @Test fun callExtractOnlyDateTimePart() { - checkInputThrowingParserException("extract(year)", - ErrorCode.PARSE_EXPECTED_KEYWORD, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 13L, - Property.KEYWORD to "FROM", - Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol(")"))) + checkInputThrowingParserException( + "extract(year)", + ErrorCode.PARSE_EXPECTED_KEYWORD, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 13L, + Property.KEYWORD to "FROM", + Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, + Property.TOKEN_VALUE to ion.newSymbol(")") + ) + ) } // NOTE that we do not test DATE_DIFF below because the parser uses the same code for both date_add and date_diff @Test fun callDateAddNoArguments() { - checkInputThrowingParserException("date_add()", + checkInputThrowingParserException( + "date_add()", ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 10L, Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol(")"))) + Property.TOKEN_VALUE to ion.newSymbol(")") + ) + ) } @Test fun callDateAddInvalidDateTimePart() { - checkInputThrowingParserException("date_add(foobar", + checkInputThrowingParserException( + "date_add(foobar", ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 10L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("foobar"))) + Property.TOKEN_VALUE to ion.newSymbol("foobar") + ) + ) } @Test fun callDateAddOneArgument() { - checkInputThrowingParserException("date_add(year)", + checkInputThrowingParserException( + "date_add(year)", ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, Property.TOKEN_VALUE to ion.newSymbol(")"), - Property.EXPECTED_TOKEN_TYPE to TokenType.COMMA)) + Property.EXPECTED_TOKEN_TYPE to TokenType.COMMA + ) + ) } @Test fun callDateAddOneArgumentTrailingComma() { - checkInputThrowingParserException("date_add(year,)", + 
checkInputThrowingParserException( + "date_add(year,)", ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol(")"))) + Property.TOKEN_VALUE to ion.newSymbol(")") + ) + ) } @Test fun callDateAddTwoArguments() { - checkInputThrowingParserException("date_add(year, b)", + checkInputThrowingParserException( + "date_add(year, b)", ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 17L, Property.TOKEN_TYPE to TokenType.RIGHT_PAREN, Property.TOKEN_VALUE to ion.newSymbol(")"), - Property.EXPECTED_TOKEN_TYPE to TokenType.COMMA)) + Property.EXPECTED_TOKEN_TYPE to TokenType.COMMA + ) + ) } @Test fun callDateAddCommaAfterThirdArgument() { - checkInputThrowingParserException("date_add(year, b, c,)", + checkInputThrowingParserException( + "date_add(year, b, c,)", ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 20L, Property.TOKEN_TYPE to TokenType.COMMA, Property.TOKEN_VALUE to ion.newSymbol(","), - Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN)) + Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN + ) + ) } @Test fun callDateAddMissingComma() { - checkInputThrowingParserException("date_add(year a, b)", + checkInputThrowingParserException( + "date_add(year a, b)", ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, Property.TOKEN_VALUE to ion.newSymbol("a"), Property.EXPECTED_TOKEN_TYPE to TokenType.COMMA - )) + ) + ) } @Test fun callDateAddMissingDateTimePart() { - checkInputThrowingParserException("date_add(a, b, c)", + checkInputThrowingParserException( + "date_add(a, b, c)", ErrorCode.PARSE_EXPECTED_DATE_TIME_PART, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 10L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("a"))) + Property.TOKEN_VALUE to ion.newSymbol("a") + ) + ) } @Test fun tokensAfterSemicolon() { - checkInputThrowingParserException("1;1", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 3L, - Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + checkInputThrowingParserException( + "1;1", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 3L, + Property.TOKEN_TYPE to TokenType.LITERAL, + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) } @Test fun validQueriesSeparatedBySemicolon() { - checkInputThrowingParserException("SELECT * FROM <<1>>;SELECT * FROM <<1>>", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 21L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("select"))) + checkInputThrowingParserException( + "SELECT * FROM <<1>>;SELECT * FROM <<1>>", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 21L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("select") + ) + ) } @Test fun semicolonInsideExpression() { - checkInputThrowingParserException("(1;)", - ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, - 
mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 3L, - Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN, - Property.TOKEN_TYPE to TokenType.SEMICOLON, - Property.TOKEN_VALUE to ion.newSymbol(";"))) + checkInputThrowingParserException( + "(1;)", + ErrorCode.PARSE_EXPECTED_TOKEN_TYPE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 3L, + Property.EXPECTED_TOKEN_TYPE to TokenType.RIGHT_PAREN, + Property.TOKEN_TYPE to TokenType.SEMICOLON, + Property.TOKEN_VALUE to ion.newSymbol(";") + ) + ) } - @Test fun selectStarStar() = checkInputThrowingParserException( "SELECT *, * FROM <<1>>", ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST, - sourceLocationProperties(1, 8)) - + sourceLocationProperties(1, 8) + ) @Test fun selectStarAliasDotStar() = checkInputThrowingParserException( "SELECT *, foo.* FROM <<{ a: 1 }>> as foo", ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST, - sourceLocationProperties(1, 8)) - + sourceLocationProperties(1, 8) + ) @Test fun selectAliasDotStarStar() = checkInputThrowingParserException( "SELECT foo.*, * FROM <<{ a: 1 }>> as foo", ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST, - sourceLocationProperties(1, 15)) - + sourceLocationProperties(1, 15) + ) @Test fun selectExpressionStar() = checkInputThrowingParserException( "SELECT 1, * FROM <<{ a: 1 }>>", ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST, - sourceLocationProperties(1, 11)) - + sourceLocationProperties(1, 11) + ) @Test fun selectStarExpression() = checkInputThrowingParserException( "SELECT *, 1 FROM <<{ a: 1 }>>", ErrorCode.PARSE_ASTERISK_IS_NOT_ALONE_IN_SELECT_LIST, - sourceLocationProperties(1, 8)) + sourceLocationProperties(1, 8) + ) @Test fun countDistinctStar() { - checkInputThrowingParserException("COUNT(DISTINCT *)", + checkInputThrowingParserException( + "COUNT(DISTINCT *)", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 7L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("distinct"))) + Property.TOKEN_VALUE to ion.newSymbol("distinct") + ) + ) } @Test fun countAllStar() { - checkInputThrowingParserException("COUNT(ALL *)", + checkInputThrowingParserException( + "COUNT(ALL *)", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 7L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("all"))) + Property.TOKEN_VALUE to ion.newSymbol("all") + ) + ) } @Test fun countExpressionStar() { - checkInputThrowingParserException("COUNT(a, *)", + checkInputThrowingParserException( + "COUNT(a, *)", ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, + mapOf( + Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 10L, Property.TOKEN_TYPE to TokenType.STAR, - Property.TOKEN_VALUE to ion.newSymbol("*"))) + Property.TOKEN_VALUE to ion.newSymbol("*") + ) + ) } @Test @@ -1477,7 +1887,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 11L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun setWithExpression() = checkInputThrowingParserException( @@ -1487,7 +1899,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 13L, Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to 
ion.newSymbol(","))) + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun setWithWildcardPath() = checkInputThrowingParserException( @@ -1497,7 +1911,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.STAR, - Property.TOKEN_VALUE to ion.newSymbol("*"))) + Property.TOKEN_VALUE to ion.newSymbol("*") + ) + ) @Test fun setWithExpressionPath() = checkInputThrowingParserException( @@ -1507,7 +1923,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) @Test fun fromWithDelete() = checkInputThrowingParserException( @@ -1517,7 +1935,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("delete"))) + Property.TOKEN_VALUE to ion.newSymbol("delete") + ) + ) @Test fun fromWithUpdate() = checkInputThrowingParserException( @@ -1527,7 +1947,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("update"))) + Property.TOKEN_VALUE to ion.newSymbol("update") + ) + ) @Test fun deleteNoFrom() = checkInputThrowingParserException( @@ -1537,7 +1959,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("x"))) + Property.TOKEN_VALUE to ion.newSymbol("x") + ) + ) @Test fun deleteFromList() = checkInputThrowingParserException( @@ -1547,37 +1971,45 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun deleteFromListWithAListMemberThatHasPath() = checkInputThrowingParserException( - "DELETE FROM x.n, a", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 16L, - Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + "DELETE FROM x.n, a", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 16L, + Property.TOKEN_TYPE to TokenType.COMMA, + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun deleteFromListWithAListMemberThatHasAnAlias() = checkInputThrowingParserException( - "DELETE FROM x.n.m AS y, a", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 23L, - Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + "DELETE FROM x.n.m AS y, a", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 23L, + Property.TOKEN_TYPE to TokenType.COMMA, + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun deleteFromListWithAListMemberThatHasAnAliasAndPosition() = checkInputThrowingParserException( - "DELETE FROM x.n.m AS y AT z, a", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 28L, - Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + "DELETE FROM x.n.m AS y AT 
z, a", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 28L, + Property.TOKEN_TYPE to TokenType.COMMA, + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun updateNoSet() = checkInputThrowingParserException( @@ -1587,7 +2019,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun updateWithNestedSet() = checkInputThrowingParserException( @@ -1597,7 +2031,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("set"))) + Property.TOKEN_VALUE to ion.newSymbol("set") + ) + ) @Test fun updateWithRemove() = checkInputThrowingParserException( @@ -1607,7 +2043,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("remove"))) + Property.TOKEN_VALUE to ion.newSymbol("remove") + ) + ) @Test fun updateWithInsert() = checkInputThrowingParserException( @@ -1617,7 +2055,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("insert_into"))) + Property.TOKEN_VALUE to ion.newSymbol("insert_into") + ) + ) @Test fun updateWithDelete() = checkInputThrowingParserException( @@ -1627,7 +2067,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("delete"))) + Property.TOKEN_VALUE to ion.newSymbol("delete") + ) + ) @Test fun updateWithExec() = checkInputThrowingParserException( @@ -1637,7 +2079,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 26L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("foo"))) + Property.TOKEN_VALUE to ion.newSymbol("foo") + ) + ) @Test fun updateWithCreateTable() = checkInputThrowingParserException( @@ -1647,7 +2091,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("create"))) + Property.TOKEN_VALUE to ion.newSymbol("create") + ) + ) @Test fun updateWithDropTable() = checkInputThrowingParserException( @@ -1657,7 +2103,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("drop"))) + Property.TOKEN_VALUE to ion.newSymbol("drop") + ) + ) @Test fun updateWithCreateIndex() = checkInputThrowingParserException( @@ -1667,7 +2115,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("create"))) + Property.TOKEN_VALUE to ion.newSymbol("create") + ) + ) @Test fun nestedRemove() = checkInputThrowingParserException( @@ -1677,7 +2127,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.KEYWORD, - 
Property.TOKEN_VALUE to ion.newSymbol("remove"))) + Property.TOKEN_VALUE to ion.newSymbol("remove") + ) + ) @Test fun nestedInsertInto() = checkInputThrowingParserException( @@ -1687,7 +2139,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 23L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("insert_into"))) + Property.TOKEN_VALUE to ion.newSymbol("insert_into") + ) + ) @Test fun selectAndRemove() = checkInputThrowingParserException( @@ -1697,7 +2151,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("remove"))) + Property.TOKEN_VALUE to ion.newSymbol("remove") + ) + ) @Test fun selectAndRemove2() = checkInputThrowingParserException( @@ -1707,7 +2163,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("remove"))) + Property.TOKEN_VALUE to ion.newSymbol("remove") + ) + ) @Test fun updateWithDropIndex() = checkInputThrowingParserException( @@ -1717,7 +2175,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("drop"))) + Property.TOKEN_VALUE to ion.newSymbol("drop") + ) + ) @Test fun updateFromList() = checkInputThrowingParserException( @@ -1727,89 +2187,117 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun insertValueMissingReturning() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 MODIFIED OLD foo", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 25L, - Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("modified_old"))) + "INSERT INTO foo VALUE 1 MODIFIED OLD foo", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 25L, + Property.TOKEN_TYPE to TokenType.KEYWORD, + Property.TOKEN_VALUE to ion.newSymbol("modified_old") + ) + ) @Test fun insertValueReturningMissingReturningElem() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING", - ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 34L, - Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + "INSERT INTO foo VALUE 1 RETURNING", + ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 34L, + Property.TOKEN_TYPE to TokenType.EOF, + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun insertValueReturningMissingReturningMapping() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING *", - ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 35L, - Property.TOKEN_TYPE to TokenType.STAR, - Property.TOKEN_VALUE to ion.newSymbol("*"))) + "INSERT INTO foo VALUE 1 RETURNING *", + ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 35L, + Property.TOKEN_TYPE to TokenType.STAR, + 
Property.TOKEN_VALUE to ion.newSymbol("*") + ) + ) @Test fun insertValueReturningMissingReturningColumn() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 47L, - Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) - + "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 47L, + Property.TOKEN_TYPE to TokenType.EOF, + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun insertValueMultiReturningMissingReturningColumn() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD , ALL OLD *", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 48L, - Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD , ALL OLD *", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 48L, + Property.TOKEN_TYPE to TokenType.COMMA, + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun insertValueMisSpellReturning() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURING MODIFIED OLD foo", - ErrorCode.PARSE_UNEXPECTED_TOKEN, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 25L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("RETURING"))) + "INSERT INTO foo VALUE 1 RETURING MODIFIED OLD foo", + ErrorCode.PARSE_UNEXPECTED_TOKEN, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 25L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("RETURING") + ) + ) @Test fun insertValueReturningInvalidReturningMapping() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING UPDATED OLD foo", - ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 35L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("UPDATED"))) + "INSERT INTO foo VALUE 1 RETURNING UPDATED OLD foo", + ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 35L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("UPDATED") + ) + ) @Test fun insertValueReturningInvalidReturningColumn() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD ;", - ErrorCode.PARSE_UNEXPECTED_TERM, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 48L, - Property.TOKEN_TYPE to TokenType.SEMICOLON, - Property.TOKEN_VALUE to ion.newSymbol(";"))) + "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD ;", + ErrorCode.PARSE_UNEXPECTED_TERM, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 48L, + Property.TOKEN_TYPE to TokenType.SEMICOLON, + Property.TOKEN_VALUE to ion.newSymbol(";") + ) + ) @Test fun insertValueReturningMultipleReturningColumn() = checkInputThrowingParserException( - "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD a,b", - ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, - mapOf(Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 50L, - Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("b"))) + "INSERT INTO foo VALUE 1 RETURNING MODIFIED OLD a,b", + 
ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE, + mapOf( + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 50L, + Property.TOKEN_TYPE to TokenType.IDENTIFIER, + Property.TOKEN_VALUE to ion.newSymbol("b") + ) + ) @Test fun createTableWithKeyword() = checkInputThrowingParserException( @@ -1819,7 +2307,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("select"))) + Property.TOKEN_VALUE to ion.newSymbol("select") + ) + ) @Test fun createForUnsupportedObject() = checkInputThrowingParserException( @@ -1829,7 +2319,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 8L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("view"))) + Property.TOKEN_VALUE to ion.newSymbol("view") + ) + ) @Test fun createTableWithNoIdentifier() = checkInputThrowingParserException( @@ -1839,7 +2331,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 13L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun createTableWithOperatorAfterIdentifier() = checkInputThrowingParserException( @@ -1849,7 +2343,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 17L, Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("-"))) + Property.TOKEN_VALUE to ion.newSymbol("-") + ) + ) @Test fun nestedCreateTable() = checkInputThrowingParserException( @@ -1859,7 +2355,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("create"))) + Property.TOKEN_VALUE to ion.newSymbol("create") + ) + ) @Test fun dropTableWithOperatorAfterIdentifier() = checkInputThrowingParserException( @@ -1869,7 +2367,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("+"))) + Property.TOKEN_VALUE to ion.newSymbol("+") + ) + ) @Test fun createIndexWithoutAnythingElse() = checkInputThrowingParserException( @@ -1879,7 +2379,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 13L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun createIndexWithName() = checkInputThrowingParserException( @@ -1889,7 +2391,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("foo_index"))) + Property.TOKEN_VALUE to ion.newSymbol("foo_index") + ) + ) @Test fun createIndexNoNameNoTarget() = checkInputThrowingParserException( @@ -1899,7 +2403,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 17L, Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) @Test fun createIndexNoNameNoKeyParenthesis() = checkInputThrowingParserException( @@ -1909,7 +2415,9 @@ class ParserErrorsTest : SqlParserTestBase() { 
Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("bar"))) + Property.TOKEN_VALUE to ion.newSymbol("bar") + ) + ) @Test fun createIndexNoNameKeyExpression() = checkInputThrowingParserException( @@ -1919,7 +2427,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 22L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) @Test fun createIndexWithOperatorAtTail() = checkInputThrowingParserException( @@ -1929,7 +2439,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 27L, Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("+"))) + Property.TOKEN_VALUE to ion.newSymbol("+") + ) + ) @Test fun createIndexNoNameKeyWildcardPath() = checkInputThrowingParserException( @@ -1939,7 +2451,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 24L, Property.TOKEN_TYPE to TokenType.STAR, - Property.TOKEN_VALUE to ion.newSymbol("*"))) + Property.TOKEN_VALUE to ion.newSymbol("*") + ) + ) @Test fun createIndexNoNameKeyExpressionPath() = checkInputThrowingParserException( @@ -1949,7 +2463,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 24L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(1))) + Property.TOKEN_VALUE to ion.newInt(1) + ) + ) @Test fun dropIndexWithoutAnythingElse() = checkInputThrowingParserException( @@ -1959,7 +2475,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 11L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun dropIndexNoIdentifierNoTarget() = checkInputThrowingParserException( @@ -1969,7 +2487,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 12L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("on"))) + Property.TOKEN_VALUE to ion.newSymbol("on") + ) + ) @Test fun dropIndexMissingOnKeyWord() = checkInputThrowingParserException( @@ -1979,7 +2499,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 16L, Property.TOKEN_TYPE to TokenType.IDENTIFIER, - Property.TOKEN_VALUE to ion.newSymbol("foo"))) + Property.TOKEN_VALUE to ion.newSymbol("foo") + ) + ) @Test fun dropIndexWithExpression() = checkInputThrowingParserException( @@ -1989,7 +2511,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 12L, Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) @Test fun dropIndexWithParenthesisAtTail() = checkInputThrowingParserException( @@ -1999,7 +2523,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 23L, Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) @Test fun dropIndexWithOperatorAtTail() = checkInputThrowingParserException( @@ -2009,7 +2535,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 23L, 
Property.TOKEN_TYPE to TokenType.OPERATOR, - Property.TOKEN_VALUE to ion.newSymbol("+"))) + Property.TOKEN_VALUE to ion.newSymbol("+") + ) + ) @Test fun insertValueWithCollection() = checkInputThrowingParserException( @@ -2019,7 +2547,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 27L, Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun insertValuesWithAt() = checkInputThrowingParserException( @@ -2029,7 +2559,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 31L, Property.TOKEN_TYPE to TokenType.AT, - Property.TOKEN_VALUE to ion.newSymbol("at"))) + Property.TOKEN_VALUE to ion.newSymbol("at") + ) + ) @Test fun valueAsTopLevelExpression() = checkInputThrowingParserException( @@ -2039,7 +2571,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 1L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("value"))) + Property.TOKEN_VALUE to ion.newSymbol("value") + ) + ) @Test fun innerCrossJoinWithOnCondition() = checkInputThrowingParserException( @@ -2049,7 +2583,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 40L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("on"))) + Property.TOKEN_VALUE to ion.newSymbol("on") + ) + ) @Test fun leftCrossJoinWithOnCondition() = checkInputThrowingParserException( @@ -2059,7 +2595,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 39L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("on"))) + Property.TOKEN_VALUE to ion.newSymbol("on") + ) + ) @Test fun rightCrossJoinWithOnCondition() = checkInputThrowingParserException( @@ -2069,7 +2607,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 40L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("on"))) + Property.TOKEN_VALUE to ion.newSymbol("on") + ) + ) @Test fun innerJoinWithOutOnCondition() = checkInputThrowingParserException( @@ -2079,7 +2619,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 33L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun leftJoinWithOutOnCondition() = checkInputThrowingParserException( @@ -2089,7 +2631,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 32L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun rightJoinWithOutOnCondition() = checkInputThrowingParserException( @@ -2099,7 +2643,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 33L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun parenJoinWithoutOnClause() = checkInputThrowingParserException( @@ -2109,11 +2655,13 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 58L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to 
ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) - //**************************************** + // **************************************** // EXEC clause parsing errors - //**************************************** + // **************************************** @Test fun execNoStoredProcedureProvided() = checkInputThrowingParserException( @@ -2123,7 +2671,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 5L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun execCommaBetweenStoredProcedureAndArg() = checkInputThrowingParserException( @@ -2133,7 +2683,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.COMMA, - Property.TOKEN_VALUE to ion.newSymbol(","))) + Property.TOKEN_VALUE to ion.newSymbol(",") + ) + ) @Test fun execArgTrailingComma() = checkInputThrowingParserException( @@ -2143,7 +2695,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 21L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun execUnexpectedParen() = checkInputThrowingParserException( @@ -2153,7 +2707,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) @Test fun execUnexpectedParenWithArgs() = checkInputThrowingParserException( @@ -2163,7 +2719,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 9L, Property.TOKEN_TYPE to TokenType.LEFT_PAREN, - Property.TOKEN_VALUE to ion.newSymbol("("))) + Property.TOKEN_VALUE to ion.newSymbol("(") + ) + ) @Test fun execAtUnexpectedLocation() = checkInputThrowingParserException( @@ -2173,7 +2731,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("exec"))) + Property.TOKEN_VALUE to ion.newSymbol("exec") + ) + ) @Test fun execAtUnexpectedLocationAfterExec() = checkInputThrowingParserException( @@ -2183,7 +2743,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 10L, Property.TOKEN_TYPE to TokenType.KEYWORD, - Property.TOKEN_VALUE to ion.newSymbol("exec"))) + Property.TOKEN_VALUE to ion.newSymbol("exec") + ) + ) @Test fun missingDateString() = checkInputThrowingParserException( @@ -2193,7 +2755,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 5L, Property.TOKEN_TYPE to TokenType.EOF, - Property.TOKEN_VALUE to ion.newSymbol("EOF"))) + Property.TOKEN_VALUE to ion.newSymbol("EOF") + ) + ) @Test fun invalidTypeIntForDateString() = checkInputThrowingParserException( @@ -2203,7 +2767,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2012))) + Property.TOKEN_VALUE to ion.newInt(2012) + ) + ) @Test fun invalidTypeIntForDateString2() = checkInputThrowingParserException( @@ -2213,7 +2779,9 @@ class ParserErrorsTest : SqlParserTestBase() { 
Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newInt(2012))) + Property.TOKEN_VALUE to ion.newInt(2012) + ) + ) @Test fun invalidTypeTimestampForDateString() = checkInputThrowingParserException( @@ -2223,7 +2791,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.ION_LITERAL, - Property.TOKEN_VALUE to ion.newTimestamp(Timestamp.forDay(2012, 8, 28)))) + Property.TOKEN_VALUE to ion.newTimestamp(Timestamp.forDay(2012, 8, 28)) + ) + ) @Test fun invalidDateStringFormat() = checkInputThrowingParserException( @@ -2233,7 +2803,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("date_string"))) + Property.TOKEN_VALUE to ion.newString("date_string") + ) + ) @Test fun invalidDateStringFormatMissingDashes() = checkInputThrowingParserException( @@ -2243,7 +2815,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("20210310"))) + Property.TOKEN_VALUE to ion.newString("20210310") + ) + ) @Test fun invalidDateStringFormatUnexpectedColons() = checkInputThrowingParserException( @@ -2253,7 +2827,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("2021:03:10"))) + Property.TOKEN_VALUE to ion.newString("2021:03:10") + ) + ) @Test fun invalidDateStringFormatInvalidDate() = checkInputThrowingParserException( @@ -2263,7 +2839,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("2021-02-29"))) + Property.TOKEN_VALUE to ion.newString("2021-02-29") + ) + ) @Test fun invalidDateStringFormatMMDDYYYY() = checkInputThrowingParserException( @@ -2273,7 +2851,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("03-10-2021"))) + Property.TOKEN_VALUE to ion.newString("03-10-2021") + ) + ) @Test fun invalidDateStringFormatDDMMYYYY() = checkInputThrowingParserException( @@ -2283,7 +2863,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("10-03-2021"))) + Property.TOKEN_VALUE to ion.newString("10-03-2021") + ) + ) @Test fun invalidExtendedDateString() = checkInputThrowingParserException( @@ -2293,7 +2875,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("+99999-03-10"))) + Property.TOKEN_VALUE to ion.newString("+99999-03-10") + ) + ) @Test fun invalidDateStringNegativeYear() = checkInputThrowingParserException( @@ -2303,7 +2887,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("-9999-03-10"))) + Property.TOKEN_VALUE to ion.newString("-9999-03-10") + ) 
+ ) @Test fun invalidDateStringPositiveYear() = checkInputThrowingParserException( @@ -2313,7 +2899,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("+9999-03-10"))) + Property.TOKEN_VALUE to ion.newString("+9999-03-10") + ) + ) @Test fun invalidDateStringNegativeMonth() = checkInputThrowingParserException( @@ -2323,7 +2911,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("2021--03-10"))) + Property.TOKEN_VALUE to ion.newString("2021--03-10") + ) + ) @Test fun invalidDateStringPositiveMonth() = checkInputThrowingParserException( @@ -2333,7 +2923,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("2021-+03-10"))) + Property.TOKEN_VALUE to ion.newString("2021-+03-10") + ) + ) @Test fun invalidDateStringNegativeDay() = checkInputThrowingParserException( @@ -2343,7 +2935,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("2021-03--10"))) + Property.TOKEN_VALUE to ion.newString("2021-03--10") + ) + ) @Test fun invalidDateStringPositiveDay() = checkInputThrowingParserException( @@ -2353,7 +2947,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("2021-03-+10"))) + Property.TOKEN_VALUE to ion.newString("2021-03-+10") + ) + ) @Test fun invalidDateStringMonthOutOfRange() = checkInputThrowingParserException( @@ -2363,7 +2959,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("9999-300000000-10"))) + Property.TOKEN_VALUE to ion.newString("9999-300000000-10") + ) + ) @Test fun invalidDateStringDayOutOfRangeForOct() = checkInputThrowingParserException( @@ -2373,7 +2971,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("1999-10-32"))) + Property.TOKEN_VALUE to ion.newString("1999-10-32") + ) + ) @Test fun invalidDateStringDayOutOfRangeForNov() = checkInputThrowingParserException( @@ -2383,7 +2983,9 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("1999-11-31"))) + Property.TOKEN_VALUE to ion.newString("1999-11-31") + ) + ) @Test fun invalidDateStringDayPaddedZeroMissingFromMonth() = checkInputThrowingParserException( @@ -2393,6 +2995,7 @@ class ParserErrorsTest : SqlParserTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.TOKEN_TYPE to TokenType.LITERAL, - Property.TOKEN_VALUE to ion.newString("1999-1-31"))) - + Property.TOKEN_VALUE to ion.newString("1999-1-31") + ) + ) } diff --git a/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt b/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt index 274a1d475a..a3e8c86896 100644 --- 
a/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt +++ b/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt @@ -14,11 +14,10 @@ package org.partiql.lang.errors -import org.partiql.lang.syntax.TokenType import org.junit.Before import org.junit.Test import org.partiql.lang.TestBase - +import org.partiql.lang.syntax.TokenType class PropertyValueMapTest : TestBase() { @@ -44,7 +43,6 @@ class PropertyValueMapTest : TestBase() { assertNull(onlyColumnValueMap[Property.LINE_NUMBER]) } - @Test fun getValues() { assertEquals(11L, oneOfEachType[Property.COLUMN_NUMBER]?.longValue()) assertEquals(TokenType.COMMA, oneOfEachType[Property.EXPECTED_TOKEN_TYPE]?.tokenTypeValue()) @@ -52,5 +50,4 @@ class PropertyValueMapTest : TestBase() { assertEquals(1, oneOfEachType[Property.EXPECTED_ARITY_MAX]?.integerValue()) assertEquals(11L, oneOfEachType[Property.COLUMN_NUMBER]?.longValue()) } - -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt b/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt index bb449303a3..913645b5a5 100644 --- a/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt +++ b/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt @@ -65,4 +65,4 @@ class SqlExceptionTest : TestBase() { assertEquals("$prefix Unexpected token\n\tLexer Error: at line 20, column 10: invalid character at, c\n", ex.toString()) assertEquals("$prefix Unexpected token\n\tLexer Error: at line 20, column 10: invalid character at, c\n", ex.toString()) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/BindingsTest.kt b/lang/test/org/partiql/lang/eval/BindingsTest.kt index 9fbd84bdea..fd775bcb06 100644 --- a/lang/test/org/partiql/lang/eval/BindingsTest.kt +++ b/lang/test/org/partiql/lang/eval/BindingsTest.kt @@ -23,9 +23,11 @@ class BindingsTest : TestBase() { fun bind(text: String): Bindings = valueFactory.newFromIonText(text).bindings - fun over(text: String, - bindingsTransform: Bindings.() -> Bindings, - block: AssertExprValue.() -> Unit) = + fun over( + text: String, + bindingsTransform: Bindings.() -> Bindings, + block: AssertExprValue.() -> Unit + ) = AssertExprValue( valueFactory.newFromIonText(text), bindingsTransform @@ -66,7 +68,6 @@ class BindingsTest : TestBase() { fun lookupSensitive(name: String) = testBindings[BindingName(name, BindingCase.SENSITIVE)]!!.scalar.numberValue()!!.toInt() - fun lookupInsensitive(name: String) = testBindings[BindingName(name, BindingCase.INSENSITIVE)]!!.scalar.numberValue()!!.toInt() @@ -94,7 +95,6 @@ class BindingsTest : TestBase() { kotlin.test.assertEquals(0, bAtEvaluateCount, "bAt should not yet be evaluated") kotlin.test.assertEquals(0, BaTEvaluateCount, "BaT should not yet be evaluated") - // Multiple case-sensitive lookups of bAt should cause it to only be evaluated once assertEquals(30, lookupSensitive("bAt")) assertEquals(30, lookupSensitive("bAt")) @@ -124,15 +124,18 @@ class BindingsTest : TestBase() { add("valueThatExists", ion.newInt(1)) add("duplicateFieldName", ion.newInt(1)) add("duplicateFieldName", ion.newInt(2)) - }, valueFactory) - + }, + valueFactory + ) private val bindingForCaseInsensitiveTests = Bindings.ofIonStruct( ion.newEmptyStruct().apply { add("valueThatExists", ion.newInt(1)) add("ambiguousFieldName", ion.newInt(1)) add("AmbiguousFieldName", ion.newInt(2)) - }, valueFactory ) + }, + valueFactory + ) @Test fun BindingsOfIonStruct_caseSensitiveNotFound() = @@ -142,18 +145,18 @@ class BindingsTest : TestBase() { fun BindingsOfIonStruct_caseSensitiveFound() = 
assertEquals( ion.newInt(1), - bindingForCaseSensitiveTests[BindingName("valueThatExists", BindingCase.SENSITIVE)]?.ionValue) + bindingForCaseSensitiveTests[BindingName("valueThatExists", BindingCase.SENSITIVE)]?.ionValue + ) @Test fun BindingsOfIonStruct_caseSensitiveAmbiguous() = try { bindingForCaseSensitiveTests[BindingName("duplicateFieldName", BindingCase.SENSITIVE)] fail("Didn't throw") - } catch(ex: EvaluationException) { + } catch (ex: EvaluationException) { assertEquals(ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, ex.errorCode) } - @Test fun BindingsOfIonStruct_caseInsensitiveNotFound() = assertNull(bindingForCaseInsensitiveTests[BindingName("doesnt_exist", BindingCase.INSENSITIVE)]) @@ -162,14 +165,15 @@ class BindingsTest : TestBase() { fun BindingsOfIonStruct_caseInsensitiveFound() = assertEquals( ion.newInt(1), - bindingForCaseInsensitiveTests[BindingName("valueThatExists", BindingCase.INSENSITIVE)]?.ionValue) + bindingForCaseInsensitiveTests[BindingName("valueThatExists", BindingCase.INSENSITIVE)]?.ionValue + ) @Test fun BindingsOfIonStruct_caseInsensitiveAmbiguous() = try { bindingForCaseInsensitiveTests[BindingName("AMBIGUOUSFIELDNAME", BindingCase.INSENSITIVE)] fail("Didn't throw") - } catch(ex: EvaluationException) { + } catch (ex: EvaluationException) { assertEquals(ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, ex.errorCode) } } diff --git a/lang/test/org/partiql/lang/eval/CoalesceEvaluationTest.kt b/lang/test/org/partiql/lang/eval/CoalesceEvaluationTest.kt index 6383d049f0..c79a1f915c 100644 --- a/lang/test/org/partiql/lang/eval/CoalesceEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/CoalesceEvaluationTest.kt @@ -27,7 +27,8 @@ class CoalesceEvaluationTest : EvaluatorTestBase() { data class CoalesceTestCase( val args: List, val expectedLegacyResult: String, - val expectedPermissiveResult: String = expectedLegacyResult) + val expectedPermissiveResult: String = expectedLegacyResult + ) @ParameterizedTest @MethodSource("coalesceEvaluationTests") @@ -35,48 +36,78 @@ class CoalesceEvaluationTest : EvaluatorTestBase() { assertEvalExprValue( "coalesce(${tc.args.joinToString(",")})", expectedLegacyModeResult = tc.expectedLegacyResult, - expectedPermissiveModeResult = tc.expectedPermissiveResult) + expectedPermissiveModeResult = tc.expectedPermissiveResult + ) companion object { fun testCase( vararg args: String, expectedLegacyResult: String, - expectedPermissiveResult: String = expectedLegacyResult) = + expectedPermissiveResult: String = expectedLegacyResult + ) = CoalesceTestCase(args.toList(), expectedLegacyResult, expectedPermissiveResult) @JvmStatic @Suppress("unused") fun coalesceEvaluationTests() = listOf( - testCase("null", - expectedLegacyResult = "null"), - testCase("null", "null", - expectedLegacyResult = "null"), - testCase("missing", "null", "missing", - expectedLegacyResult = "null"), - testCase("null", "missing", - expectedLegacyResult = "null"), - testCase("missing", + testCase( + "null", + expectedLegacyResult = "null" + ), + testCase( + "null", "null", + expectedLegacyResult = "null" + ), + testCase( + "missing", "null", "missing", + expectedLegacyResult = "null" + ), + testCase( + "null", "missing", + expectedLegacyResult = "null" + ), + testCase( + "missing", expectedLegacyResult = "null", - expectedPermissiveResult = "missing"), - testCase("missing", "missing", + expectedPermissiveResult = "missing" + ), + testCase( + "missing", "missing", expectedLegacyResult = "null", - expectedPermissiveResult = "missing"), - testCase("1", "null", - expectedLegacyResult = 
"1"), - testCase("null", "2", - expectedLegacyResult = "2"), - testCase("1", "missing", - expectedLegacyResult = "1"), - testCase("missing", "2", - expectedLegacyResult = "2"), - testCase("null", "missing", "null", "null", "2", "3", "4", "5", - expectedLegacyResult = "2"), - testCase("null", "null", "2", "3", "4", "5", - expectedLegacyResult = "2"), - testCase("missing", "missing", "2", "3", "4", "5", - expectedLegacyResult = "2"), - testCase("null", "missing", "null", "null", "2 in [1,2,3]", "3", "4", "5", - expectedLegacyResult = "true") + expectedPermissiveResult = "missing" + ), + testCase( + "1", "null", + expectedLegacyResult = "1" + ), + testCase( + "null", "2", + expectedLegacyResult = "2" + ), + testCase( + "1", "missing", + expectedLegacyResult = "1" + ), + testCase( + "missing", "2", + expectedLegacyResult = "2" + ), + testCase( + "null", "missing", "null", "null", "2", "3", "4", "5", + expectedLegacyResult = "2" + ), + testCase( + "null", "null", "2", "3", "4", "5", + expectedLegacyResult = "2" + ), + testCase( + "missing", "missing", "2", "3", "4", "5", + expectedLegacyResult = "2" + ), + testCase( + "null", "missing", "null", "null", "2 in [1,2,3]", "3", "4", "5", + expectedLegacyResult = "true" + ) ) } } diff --git a/lang/test/org/partiql/lang/eval/CompOptions.kt b/lang/test/org/partiql/lang/eval/CompOptions.kt index c1fb58e27b..07d7cefd91 100644 --- a/lang/test/org/partiql/lang/eval/CompOptions.kt +++ b/lang/test/org/partiql/lang/eval/CompOptions.kt @@ -18,26 +18,36 @@ package org.partiql.lang.eval enum class CompOptions(val options: CompileOptions) { STANDARD(CompileOptions.standard()), - UNDEF_VAR_MISSING(CompileOptions.build { - undefinedVariable(UndefinedVariableBehavior.MISSING) - }), - - PROJECT_UNFILTERED_UNDEF_VAR_MISSING(CompileOptions.build { - projectionIteration(ProjectionIterationBehavior.UNFILTERED) - undefinedVariable(UndefinedVariableBehavior.MISSING) - }), - - PROJECT_UNFILTERED(CompileOptions.build { - projectionIteration(ProjectionIterationBehavior.UNFILTERED) - }), - - TYPED_OP_BEHAVIOR_HONOR_PARAMS(CompileOptions.build { - typedOpBehavior(TypedOpBehavior.HONOR_PARAMETERS) - }), - - PERMISSIVE(CompileOptions.build { - this.typingMode(TypingMode.PERMISSIVE) - }); + UNDEF_VAR_MISSING( + CompileOptions.build { + undefinedVariable(UndefinedVariableBehavior.MISSING) + } + ), + + PROJECT_UNFILTERED_UNDEF_VAR_MISSING( + CompileOptions.build { + projectionIteration(ProjectionIterationBehavior.UNFILTERED) + undefinedVariable(UndefinedVariableBehavior.MISSING) + } + ), + + PROJECT_UNFILTERED( + CompileOptions.build { + projectionIteration(ProjectionIterationBehavior.UNFILTERED) + } + ), + + TYPED_OP_BEHAVIOR_HONOR_PARAMS( + CompileOptions.build { + typedOpBehavior(TypedOpBehavior.HONOR_PARAMETERS) + } + ), + + PERMISSIVE( + CompileOptions.build { + this.typingMode(TypingMode.PERMISSIVE) + } + ); companion object { /** Only those options from [CompOptions] which have [UndefinedVariableBehavior.MISSING]. */ @@ -46,4 +56,4 @@ enum class CompOptions(val options: CompileOptions) { /** Only those options from [CompOptions] which have [ProjectionIterationBehavior.UNFILTERED] set. 
*/ val onlyProjectIterationBehaviorFilterMissing = listOf(STANDARD, UNDEF_VAR_MISSING) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/ErrorSignalerTests.kt b/lang/test/org/partiql/lang/eval/ErrorSignalerTests.kt index c73fd31d84..f3de26e967 100644 --- a/lang/test/org/partiql/lang/eval/ErrorSignalerTests.kt +++ b/lang/test/org/partiql/lang/eval/ErrorSignalerTests.kt @@ -33,7 +33,7 @@ class ErrorSignalerTests { val ex = try { runTest(b, 6) fail("Didn't throw") - } catch(ex: EvaluationException) { + } catch (ex: EvaluationException) { ex } assertEquals(ex.errorCode, ErrorCode.EVALUATOR_CAST_FAILED) @@ -47,7 +47,6 @@ class ErrorSignalerTests { // The choice of ErrorCode.EVALUATOR_CAST_FAILED is arbitrary just for this test. ErrorCode.EVALUATOR_CAST_FAILED, { ErrorDetails(dummyMetas, "The value can't be 6") }, - { valueFactory.newInt(value * 10) }) - + { valueFactory.newInt(value * 10) } + ) } - diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomTypeCastTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomTypeCastTests.kt index 63a346f4d2..cf3503fecd 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomTypeCastTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerCustomTypeCastTests.kt @@ -9,7 +9,6 @@ import org.partiql.lang.util.honorTypedOpParameters import org.partiql.lang.util.legacyTypingMode import org.partiql.lang.util.permissiveTypingMode - class EvaluatingCompilerCustomTypeCastTests : CastTestBase() { companion object { @@ -170,17 +169,19 @@ class EvaluatingCompilerCustomTypeCastTests : CastTestBase() { ).types(listOf("RS_DOUBLE_PRECISION", "RS_FLOAT", "RS_FLOAT8", "SPARK_DOUBLE")) ).flatten() - private val customTypeCastConfiguredTestCases = (customTypeCases.map { case -> - ConfiguredCastCase(case, "HONOR_PARAM_CAST, LEGACY_TYPING_MODE") { - honorTypedOpParameters() - legacyTypingMode() - } - } + customTypeCases.toPermissive().map { case -> - ConfiguredCastCase(case, "HONOR_PARAM_CAST, PERMISSIVE_TYPING_MODE") { - honorTypedOpParameters() - permissiveTypingMode() + private val customTypeCastConfiguredTestCases = ( + customTypeCases.map { case -> + ConfiguredCastCase(case, "HONOR_PARAM_CAST, LEGACY_TYPING_MODE") { + honorTypedOpParameters() + legacyTypingMode() + } + } + customTypeCases.toPermissive().map { case -> + ConfiguredCastCase(case, "HONOR_PARAM_CAST, PERMISSIVE_TYPING_MODE") { + honorTypedOpParameters() + permissiveTypingMode() + } } - }).map { + ).map { it.copy( configurePipeline = { customDataTypes(CUSTOM_TEST_TYPES) @@ -193,8 +194,7 @@ class EvaluatingCompilerCustomTypeCastTests : CastTestBase() { override fun getParameters() = customTypeCastConfiguredTestCases } - @ParameterizedTest @ArgumentsSource(ConfiguredCastArguments::class) fun configuredCast(configuredCastCase: CastTestBase.ConfiguredCastCase) = configuredCastCase.assertCase() -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerDateTimeTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerDateTimeTests.kt index 64d2585eae..b19d9c876e 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerDateTimeTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerDateTimeTests.kt @@ -22,7 +22,7 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { @ParameterizedTest @ArgumentsSource(ArgumentsForDateLiterals::class) - fun testDate(tc: EvaluatorTestCase) { + fun testDate(tc: EvaluatorTestCase) { val originalExprValue = eval(tc.sqlUnderTest) 
assertEquals(originalExprValue.toString(), tc.expectedSql) if (originalExprValue.type == ExprValueType.DATE) { @@ -62,7 +62,7 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { @ParameterizedTest @ArgumentsSource(ArgumentsForTimeLiterals::class) - fun testTime(tc: TimeTestCase) { + fun testTime(tc: TimeTestCase) { val originalExprValue = eval(source = tc.query, compileOptions = tc.compileOptions) assertEquals(tc.expected, originalExprValue.toString()) if (originalExprValue.type == ExprValueType.TIME) { @@ -99,8 +99,8 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { case("TIME (3) '12:24:12.12300'", "12:24:12.123", TimeForValidation(12, 24, 12, 123000000, 3)), case("TIME (4) '12:24:12.12300'", "12:24:12.1230", TimeForValidation(12, 24, 12, 123000000, 4)), case("TIME (4) '12:24:12.123'", "12:24:12.1230", TimeForValidation(12, 24, 12, 123000000, 4)), - case("TIME (0) '12:59:59.9'", "13:00:00", TimeForValidation(13, 0,0, 0, 0)), - case("TIME WITH TIME ZONE '00:00:00'", "00:00:00${defaultTimezoneOffset.getOffsetHHmm()}", TimeForValidation(0,0,0,0,0, defaultTzMinutes)), + case("TIME (0) '12:59:59.9'", "13:00:00", TimeForValidation(13, 0, 0, 0, 0)), + case("TIME WITH TIME ZONE '00:00:00'", "00:00:00${defaultTimezoneOffset.getOffsetHHmm()}", TimeForValidation(0, 0, 0, 0, 0, defaultTzMinutes)), case("TIME (2) WITH TIME ZONE '12:24:12.123'", "12:24:12.12${defaultTimezoneOffset.getOffsetHHmm()}", TimeForValidation(12, 24, 12, 120000000, 2, defaultTzMinutes)), case("TIME WITH TIME ZONE '12:24:12.12300'", "12:24:12.12300${defaultTimezoneOffset.getOffsetHHmm()}", TimeForValidation(12, 24, 12, 123000000, 5, defaultTzMinutes)), case("TIME (3) WITH TIME ZONE '12:24:12.12300'", "12:24:12.123${defaultTimezoneOffset.getOffsetHHmm()}", TimeForValidation(12, 24, 12, 123000000, 3, defaultTzMinutes)), @@ -117,18 +117,18 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { case("TIME WITH TIME ZONE '12:25:12.123456' IS TIME", "true"), case("TIME (2) WITH TIME ZONE '01:01:12' IS TIME", "true"), case("'01:01:12' IS TIME", "false"), - case("TIME WITH TIME ZONE '00:00:00'", "00:00:00-01:00", TimeForValidation(0,0,0,0,0, -60), buildCompileOptions(-1)), - case("TIME WITH TIME ZONE '11:23:45.678'", "11:23:45.678+06:00", TimeForValidation(11,23,45,678000000,3, 360), buildCompileOptions(6)), - case("TIME WITH TIME ZONE '11:23:45.678-05:30'", "11:23:45.678-05:30", TimeForValidation(11,23,45,678000000,3, -330), buildCompileOptions(6)), + case("TIME WITH TIME ZONE '00:00:00'", "00:00:00-01:00", TimeForValidation(0, 0, 0, 0, 0, -60), buildCompileOptions(-1)), + case("TIME WITH TIME ZONE '11:23:45.678'", "11:23:45.678+06:00", TimeForValidation(11, 23, 45, 678000000, 3, 360), buildCompileOptions(6)), + case("TIME WITH TIME ZONE '11:23:45.678-05:30'", "11:23:45.678-05:30", TimeForValidation(11, 23, 45, 678000000, 3, -330), buildCompileOptions(6)), case("TIME (2) WITH TIME ZONE '12:59:59.13456'", "12:59:59.13-05:30", TimeForValidation(12, 59, 59, 130000000, 2, -330), buildCompileOptions(-5, -30)) ) } private val randomGenerator = generateRandomSeed() - private fun generateRandomSeed() : Random { + private fun generateRandomSeed(): Random { val seed = Random.nextInt() - println("Randomly generated seed is ${seed}. Use this to reproduce failures in dev environment.") + println("Randomly generated seed is $seed. Use this to reproduce failures in dev environment.") return Random(seed) } @@ -141,7 +141,7 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { val tz_minutes: Int? 
= null ) { fun expectedTimeString(withTimeZone: Boolean): String { - val timezoneMinutes = when(withTimeZone) { + val timezoneMinutes = when (withTimeZone) { true -> tz_minutes ?: ZoneOffset.UTC.totalSeconds / SECONDS_PER_MINUTE else -> null } @@ -150,20 +150,21 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { override fun toString(): String { val hourStr = hour.toString().padStart(2, '0') - val minStr = minute.toString().padStart(2,'0') + val minStr = minute.toString().padStart(2, '0') val secStr = second.toString().padStart(2, '0') val nanoStr = nano.toString().padStart(9, '0') - val timezoneStr = tz_minutes?.let { "" + - (if (it >= 0) "+" else "-") + - (it.absoluteValue / 60).toString().padStart(2, '0') + - ":" + - (it.absoluteValue % 60).toString().padStart(2, '0') + val timezoneStr = tz_minutes?.let { + "" + + (if (it >= 0) "+" else "-") + + (it.absoluteValue / 60).toString().padStart(2, '0') + + ":" + + (it.absoluteValue % 60).toString().padStart(2, '0') } ?: "" return "$hourStr:$minStr:$secStr.$nanoStr$timezoneStr" } } - private fun Random.nextTime(withPrecision: Boolean = false, withTimezone: Boolean = false) : TimeForValidation { + private fun Random.nextTime(withPrecision: Boolean = false, withTimezone: Boolean = false): TimeForValidation { val hour = nextInt(24) val minute = nextInt(60) val second = nextInt(60) @@ -219,7 +220,7 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { @Test fun testRandomTimesWithPrecision() { - (RANDOM_TIMES_WITH_PRECISION + RANDOM_TIMES_WITH_PRECISION_AND_TIMEZONE).map { + (RANDOM_TIMES_WITH_PRECISION + RANDOM_TIMES_WITH_PRECISION_AND_TIMEZONE).map { val query = "TIME (${it.precision}) '$it'" val expected = it.expectedTimeString(withTimeZone = false) val actual = eval(query) @@ -229,7 +230,7 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { @Test fun testRandomTimesWithTimezone() { - (RANDOM_TIMES + RANDOM_TIMES_WITH_TIMEZONE).map { + (RANDOM_TIMES + RANDOM_TIMES_WITH_TIMEZONE).map { val query = "TIME WITH TIME ZONE '$it'" val expected = it.expectedTimeString(withTimeZone = true) val actual = eval(query) @@ -254,7 +255,7 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { try { voidEval(tc.query) fail("Expected ${tc.query} to throw an error") - } catch(e: EvaluationException) { + } catch (e: EvaluationException) { // EvaluationException is thrown as expected, do nothing. 
} false -> { @@ -299,4 +300,4 @@ class EvaluatingCompilerDateTimeTests : EvaluatorTestBase() { errorCase("TIME WITH TIME ZONE '12:12:13' > DATE '2012-02-29'") ) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerExceptionsTest.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerExceptionsTest.kt index 3d48478e60..08d5c408da 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerExceptionsTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerExceptionsTest.kt @@ -43,18 +43,21 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { """CAST(12 AS FLOAT(1))""", ErrorCode.SEMANTIC_FLOAT_PRECISION_UNSUPPORTED, sourceLocationProperties(1, 13), - compOptions = CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS), + compOptions = CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS + ), EvaluatorErrorTestCase( """CAN_CAST(12 AS FLOAT(1))""", ErrorCode.SEMANTIC_FLOAT_PRECISION_UNSUPPORTED, sourceLocationProperties(1, 17), - compOptions = CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS), + compOptions = CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS + ), EvaluatorErrorTestCase( """12 IS FLOAT(1)""", ErrorCode.SEMANTIC_FLOAT_PRECISION_UNSUPPORTED, sourceLocationProperties(1, 8), - compOptions = CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS) - ) + compOptions = CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS + ) + ) } @Test @@ -69,7 +72,8 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { """, "Cannot compare values: 'APPLE', 2", NodeMetadata(4, 19), - "<<>>") + "<<>>" + ) @Test fun notBetweenIncompatiblePredicate() = assertThrows( @@ -80,70 +84,81 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { """, "Cannot compare values: 'APPLE', 1", NodeMetadata(4, 19), - "<<>>") + "<<>>" + ) @Test fun shadowedVariables() = assertThrows( """SELECT VALUE a FROM `[{v:5}]` AS item, @item.v AS a, @item.v AS a""", "Multiple matches were found for the specified identifier", NodeMetadata(1, 14), - "<>") + "<>" + ) @Test fun topLevelCountStar() = assertThrows("""COUNT(*)""", "COUNT(*) is not allowed in this context", NodeMetadata(1, 1)) - @Test fun selectValueCountStar() = assertThrows( """SELECT VALUE COUNT(*) FROM numbers""", "COUNT(*) is not allowed in this context", - NodeMetadata(1, 14)) + NodeMetadata(1, 14) + ) @Test fun selectListNestedAggregateCall() = assertThrows( """SELECT SUM(AVG(n)) FROM <> AS n""", "The arguments of an aggregate function cannot contain aggregate functions", - NodeMetadata(1, 12)) + NodeMetadata(1, 12) + ) private val sqlWithUndefinedVariable = "SELECT VALUE y FROM << 'el1' >> AS x" @Test fun badAlias() { - //Note that the current default for CompileOptions.undefinedVariable is UndefinedVariableBehavior.ERROR + // Note that the current default for CompileOptions.undefinedVariable is UndefinedVariableBehavior.ERROR checkInputThrowingEvaluationException( sqlWithUndefinedVariable, ErrorCode.EVALUATOR_BINDING_DOES_NOT_EXIST, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, - Property.BINDING_NAME to "y"), - expectedPermissiveModeResult = "<>") + Property.BINDING_NAME to "y" + ), + expectedPermissiveModeResult = "<>" + ) } @Test fun missingAlias() = - //Same query as previous test--but DO NOT throw exception this time because of UndefinedVariableBehavior.MISSING - assertEval(sqlWithUndefinedVariable, "[null]", - compileOptions = CompileOptions.build { undefinedVariable(UndefinedVariableBehavior.MISSING) }) + // Same query as previous test--but DO NOT throw exception this time because of 
UndefinedVariableBehavior.MISSING + assertEval( + sqlWithUndefinedVariable, "[null]", + compileOptions = CompileOptions.build { undefinedVariable(UndefinedVariableBehavior.MISSING) } + ) private val sqlWithUndefinedQuotedVariable = "SELECT VALUE \"y\" FROM << 'el1' >> AS x" @Test fun badQuotedAlias() { - //Note that the current default for CompileOptions.undefinedVariable is UndefinedVariableBehavior.ERROR + // Note that the current default for CompileOptions.undefinedVariable is UndefinedVariableBehavior.ERROR checkInputThrowingEvaluationException( sqlWithUndefinedQuotedVariable, ErrorCode.EVALUATOR_QUOTED_BINDING_DOES_NOT_EXIST, mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 14L, - Property.BINDING_NAME to "y"), - expectedPermissiveModeResult = "<>") + Property.BINDING_NAME to "y" + ), + expectedPermissiveModeResult = "<>" + ) } @Test fun missingQuotedAlias() = - //Same query as previous test--but DO NOT throw exception this time because of UndefinedVariableBehavior.MISSING - assertEval(sqlWithUndefinedQuotedVariable, "[null]", - compileOptions = CompileOptions.build { undefinedVariable(UndefinedVariableBehavior.MISSING) }) + // Same query as previous test--but DO NOT throw exception this time because of UndefinedVariableBehavior.MISSING + assertEval( + sqlWithUndefinedQuotedVariable, "[null]", + compileOptions = CompileOptions.build { undefinedVariable(UndefinedVariableBehavior.MISSING) } + ) @Test fun wrongArityExists() = assertThrows("exists()", "exists takes a single argument, received: 0", NodeMetadata(1, 1)) @@ -155,13 +170,15 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { fun rightJoin() = assertThrows( "SELECT * FROM animals AS a RIGHT CROSS JOIN animal_types AS a_type WHERE a.type = a_type.id", "RIGHT and FULL JOIN not supported", - NodeMetadata(1, 28)) + NodeMetadata(1, 28) + ) @Test fun outerJoin() = assertThrows( "SELECT * FROM animals AS a OUTER CROSS JOIN animal_types AS a_type WHERE a.type = a_type.id", "RIGHT and FULL JOIN not supported", - NodeMetadata(1, 28)) + NodeMetadata(1, 28) + ) @Test fun addingWrongTypes() = assertThrows("1 + 2 + 4 + 'a' + 5", "Expected number: \"a\"", NodeMetadata(1, 11), "MISSING") @@ -171,14 +188,16 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { "CAST('a' as int) > 0", ErrorCode.EVALUATOR_CAST_FAILED, sourceLocationProperties(1, 1) + mapOf(Property.CAST_FROM to "STRING", Property.CAST_TO to "INT"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun badCastInSelectToInt() = checkInputThrowingEvaluationException( "SELECT * FROM `[{_1: a, _2: 1}, {_1: a, _2: 'a'}, {_1: a, _2: 3}]` WHERE CAST(_2 as INT) > 0", ErrorCode.EVALUATOR_CAST_FAILED, sourceLocationProperties(1, 75) + mapOf(Property.CAST_FROM to "SYMBOL", Property.CAST_TO to "INT"), - expectedPermissiveModeResult = "<<{'_1': `a`, '_2': 1}, {'_1': `a`, '_2': 3}>>") + expectedPermissiveModeResult = "<<{'_1': `a`, '_2': 1}, {'_1': `a`, '_2': 3}>>" + ) @Test fun badCastToDecimal() = checkInputThrowingEvaluationException( @@ -186,7 +205,8 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { ErrorCode.EVALUATOR_CAST_FAILED, sourceLocationProperties(1, 1) + mapOf(Property.CAST_FROM to "STRING", Property.CAST_TO to "DECIMAL"), NumberFormatException::class, - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun badCastToTimestamp() = checkInputThrowingEvaluationException( @@ -194,42 +214,48 @@ class EvaluatingCompilerExceptionsTest : 
EvaluatorTestBase() { ErrorCode.EVALUATOR_CAST_FAILED, sourceLocationProperties(1, 1) + mapOf(Property.CAST_FROM to "STRING", Property.CAST_TO to "TIMESTAMP"), IllegalArgumentException::class, - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun divideByZero() = checkInputThrowingEvaluationException( "1 / 0", ErrorCode.EVALUATOR_DIVIDE_BY_ZERO, sourceLocationProperties(1, 3), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun divideByZeroDecimal() = checkInputThrowingEvaluationException( "1.0 / 0.0", ErrorCode.EVALUATOR_DIVIDE_BY_ZERO, sourceLocationProperties(1, 5), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun moduloByZero() = checkInputThrowingEvaluationException( "1 % 0", ErrorCode.EVALUATOR_MODULO_BY_ZERO, sourceLocationProperties(1, 3), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun moduloByZeroDecimal() = checkInputThrowingEvaluationException( "1.0 % 0.0", ErrorCode.EVALUATOR_MODULO_BY_ZERO, sourceLocationProperties(1, 5), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun divideByZeroInSelect() = assertThrows( "SELECT * FROM `[{_1: a, _2: 1}, {_1: a, _2: 2}, {_1: a, _2: 3}]` WHERE _2 / 0 > 0", "/ by zero", NodeMetadata(1, 76), - "<<>>") + "<<>>" + ) @Test fun utcnowWithArgument() = assertThrows("utcnow(1)", "utcnow takes exactly 0 arguments, received: 1", NodeMetadata(1, 1)) @@ -239,26 +265,30 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { """ select "repeated" from `[{repeated:1, repeated:2}]` """, ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, sourceLocationProperties(1, 9) + mapOf(Property.BINDING_NAME to "repeated", Property.BINDING_NAME_MATCHES to "repeated, repeated"), - expectedPermissiveModeResult = "<<{}>>") + expectedPermissiveModeResult = "<<{}>>" + ) @Test fun ambiguousFieldOnStructCaseInsensitiveLookup() = checkInputThrowingEvaluationException( """ select REPEATED from `[{repeated:1, repeated:2}]` """, ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, sourceLocationProperties(1, 9) + mapOf(Property.BINDING_NAME to "REPEATED", Property.BINDING_NAME_MATCHES to "repeated, repeated"), - expectedPermissiveModeResult = "<<{}>>") + expectedPermissiveModeResult = "<<{}>>" + ) @Test fun invalidEscapeSequenceInLike() = checkInputThrowingEvaluationException( """ '' like '^1' escape '^' """, ErrorCode.EVALUATOR_LIKE_PATTERN_INVALID_ESCAPE_SEQUENCE, - sourceLocationProperties(1, 10) + mapOf(Property.LIKE_ESCAPE to "^", Property.LIKE_PATTERN to "^1")) + sourceLocationProperties(1, 10) + mapOf(Property.LIKE_ESCAPE to "^", Property.LIKE_PATTERN to "^1") + ) @Test fun unboundParameters() = checkInputThrowingEvaluationException( """SELECT ? FROM <<1>>""", ErrorCode.EVALUATOR_UNBOUND_PARAMETER, - sourceLocationProperties(1, 8) + mapOf(Property.EXPECTED_PARAMETER_ORDINAL to 1, Property.BOUND_PARAMETER_COUNT to 0)) + sourceLocationProperties(1, 8) + mapOf(Property.EXPECTED_PARAMETER_ORDINAL to 1, Property.BOUND_PARAMETER_COUNT to 0) + ) @Test fun searchedCaseNonBooleanPredicate() = checkInputThrowingEvaluationException( @@ -267,7 +297,8 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { // TODO: the call to .booleanValue in the thunk does not have access to metas, so the EvaluationException // is reported to be at the line & column of the CASE statement, not the predicate, unfortunately. 
expectErrorContextValues = sourceLocationProperties(1, 1), - expectedPermissiveModeResult = "'permissive mode result'") + expectedPermissiveModeResult = "'permissive mode result'" + ) @Test fun structWithStringAndIntegerKey() = checkInputThrowingEvaluationException( @@ -308,7 +339,8 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { input = "{ 'valid_key': 42, null: 2 }", errorCode = ErrorCode.SEMANTIC_NON_TEXT_STRUCT_FIELD_KEY, expectErrorContextValues = sourceLocationProperties(1, 1), - expectedPermissiveModeResult = "{ 'valid_key': 42 }") + expectedPermissiveModeResult = "{ 'valid_key': 42 }" + ) @Test fun structWithMissingKey() = checkInputThrowingEvaluationException( @@ -342,7 +374,8 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { Property.COLUMN_NUMBER to 1L, Property.ACTUAL_TYPE to "NULL" ), - expectedPermissiveModeResult = "{ 'validVarKey': 2 }") + expectedPermissiveModeResult = "{ 'validVarKey': 2 }" + ) @Test fun nestedStructWithIntegerKey() = checkInputThrowingEvaluationException( @@ -373,8 +406,10 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { expectErrorContextValues = mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, - Property.BINDING_NAME to "both"), - expectedPermissiveModeResult = "<<{'_1':1}>>") + Property.BINDING_NAME to "both" + ), + expectedPermissiveModeResult = "<<{'_1':1}>>" + ) @Test fun trimSpecKeywordLeadingNotUsedInTrim() = checkInputThrowingEvaluationException( @@ -383,8 +418,10 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { expectErrorContextValues = mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, - Property.BINDING_NAME to "leading"), - expectedPermissiveModeResult = "<<{'_1':1}>>") + Property.BINDING_NAME to "leading" + ), + expectedPermissiveModeResult = "<<{'_1':1}>>" + ) @Test fun trimSpecKeywordTrailingNotUsedInTrim() = checkInputThrowingEvaluationException( @@ -393,8 +430,10 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { expectErrorContextValues = mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 15L, - Property.BINDING_NAME to "trailing"), - expectedPermissiveModeResult = "<<{'_1':1}>>") + Property.BINDING_NAME to "trailing" + ), + expectedPermissiveModeResult = "<<{'_1':1}>>" + ) @Test fun trimSpecKeywordLeadingUsedAsSecondArgInTrim() = checkInputThrowingEvaluationException( @@ -403,8 +442,10 @@ class EvaluatingCompilerExceptionsTest : EvaluatorTestBase() { expectErrorContextValues = mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 11L, - Property.BINDING_NAME to "leading"), - expectedPermissiveModeResult = "MISSING") + Property.BINDING_NAME to "leading" + ), + expectedPermissiveModeResult = "MISSING" + ) // TODO: ORDER BY node is missing metas https://github.com/partiql/partiql-lang-kotlin/issues/516 hence the // incorrect source location in the reported error diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerExecTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerExecTests.kt index 365cf2150a..b60cecd1a2 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerExecTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerExecTests.kt @@ -24,7 +24,7 @@ private fun createWrongSProcErrorContext(arg: ExprValue, expectedArgTypes: Strin /** * Simple stored procedure that takes no arguments and outputs 0. 
 */
-private class ZeroArgProcedure(val valueFactory: ExprValueFactory): StoredProcedure {
+private class ZeroArgProcedure(val valueFactory: ExprValueFactory) : StoredProcedure {
    override val signature = StoredProcedureSignature("zero_arg_procedure", 0)
    override fun call(session: EvaluationSession, args: List): ExprValue {
@@ -36,7 +36,7 @@ private class ZeroArgProcedure(val valueFactory: ExprValueFactory): StoredProced
 * Simple stored procedure that takes no arguments and outputs -1. Used to show that added stored procedures of the
 * same name will be overridden.
 */
-private class OverriddenZeroArgProcedure(val valueFactory: ExprValueFactory): StoredProcedure {
+private class OverriddenZeroArgProcedure(val valueFactory: ExprValueFactory) : StoredProcedure {
    override val signature = StoredProcedureSignature("zero_arg_procedure", 0)
    override fun call(session: EvaluationSession, args: List): ExprValue {
@@ -47,17 +47,19 @@ private class OverriddenZeroArgProcedure(val valueFactory: ExprValueFactory): St
/**
 * Simple stored procedure that takes one integer argument and outputs that argument back.
 */
-private class OneArgProcedure(val valueFactory: ExprValueFactory): StoredProcedure {
+private class OneArgProcedure(val valueFactory: ExprValueFactory) : StoredProcedure {
    override val signature = StoredProcedureSignature("one_arg_procedure", 1)
    override fun call(session: EvaluationSession, args: List): ExprValue {
        val arg = args.first()
        if (arg.type != ExprValueType.INT) {
            val errorContext = createWrongSProcErrorContext(arg, "INT", signature.name)
-            throw EvaluationException("invalid first argument",
+            throw EvaluationException(
+                "invalid first argument",
                ErrorCode.EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_PROCEDURE_CALL,
                errorContext,
-                internal = false)
+                internal = false
+            )
        }
        return arg
    }
@@ -67,26 +69,30 @@ private class OneArgProcedure(val valueFactory: ExprValueFactory): StoredProcedu
 * Simple stored procedure that takes two integer arguments and outputs the args as a string separated by
 * a space.
 */
-private class TwoArgProcedure(val valueFactory: ExprValueFactory): StoredProcedure {
+private class TwoArgProcedure(val valueFactory: ExprValueFactory) : StoredProcedure {
    override val signature = StoredProcedureSignature("two_arg_procedure", 2)
    override fun call(session: EvaluationSession, args: List): ExprValue {
        val arg1 = args.first()
        if (arg1.type != ExprValueType.INT) {
            val errorContext = createWrongSProcErrorContext(arg1, "INT", signature.name)
-            throw EvaluationException("invalid first argument",
+            throw EvaluationException(
+                "invalid first argument",
                ErrorCode.EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_PROCEDURE_CALL,
                errorContext,
-                internal = false)
+                internal = false
+            )
        }
        val arg2 = args[1]
        if (arg2.type != ExprValueType.INT) {
            val errorContext = createWrongSProcErrorContext(arg2, "INT", signature.name)
-            throw EvaluationException("invalid second argument",
+            throw EvaluationException(
+                "invalid second argument",
                ErrorCode.EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_PROCEDURE_CALL,
                errorContext,
-                internal = false)
+                internal = false
+            )
        }
        return valueFactory.newString("$arg1 $arg2")
    }
@@ -96,20 +102,22 @@ private class TwoArgProcedure(val valueFactory: ExprValueFactory): StoredProcedu
 * Simple stored procedure that takes one string argument and checks if the binding (case-insensitive) is in the
 * current session's global bindings. If so, returns the value associated with that binding. Otherwise, returns missing.
*/ -private class OutputBindingProcedure(val valueFactory: ExprValueFactory): StoredProcedure { +private class OutputBindingProcedure(val valueFactory: ExprValueFactory) : StoredProcedure { override val signature = StoredProcedureSignature("output_binding", 1) override fun call(session: EvaluationSession, args: List): ExprValue { val arg = args.first() if (arg.type != ExprValueType.STRING) { val errorContext = createWrongSProcErrorContext(arg, "STRING", signature.name) - throw EvaluationException("invalid first argument", + throw EvaluationException( + "invalid first argument", ErrorCode.EVALUATOR_INCORRECT_TYPE_OF_ARGUMENTS_TO_PROCEDURE_CALL, errorContext, - internal = false) + internal = false + ) } val bindingName = BindingName(arg.stringValue(), BindingCase.INSENSITIVE) - return when(val value = session.globals[bindingName]) { + return when (val value = session.globals[bindingName]) { null -> valueFactory.missingValue else -> value } @@ -147,7 +155,7 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { val queryExprValue = evalSProc(tc.sqlUnderTest, session) val expectedExprValue = evalSProc(tc.expectedSql, session) - if(!expectedExprValue.exprEquals(queryExprValue)) { + if (!expectedExprValue.exprEquals(queryExprValue)) { println("Expected ionValue : ${expectedExprValue.ionValue}") println("Actual ionValue : ${queryExprValue.ionValue}") @@ -162,15 +170,15 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { private fun checkInputThrowingEvaluationExceptionSProc(tc: EvaluatorErrorTestCase, session: EvaluationSession) { softAssert { try { - val result = evalSProc(tc.sqlUnderTest, session = session).ionValue; - fail("Expected EvaluationException but there was no Exception. " + - "The unexpected result was: \n${result.toPrettyString()}") - } - catch (e: EvaluationException) { + val result = evalSProc(tc.sqlUnderTest, session = session).ionValue + fail( + "Expected EvaluationException but there was no Exception. 
" + + "The unexpected result was: \n${result.toPrettyString()}" + ) + } catch (e: EvaluationException) { if (tc.cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(tc.cause.java) checkErrorAndErrorContext(tc.errorCode, e, tc.expectErrorContextValues) - } - catch (e: Exception) { + } catch (e: Exception) { fail("Expected EvaluationException but a different exception was thrown:\n\t $e") } } @@ -181,26 +189,31 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { // OverriddenZeroArgProcedure w/ same name as ZeroArgProcedure overridden EvaluatorTestCase( "EXEC zero_arg_procedure", - "0"), + "0" + ), EvaluatorTestCase( "EXEC one_arg_procedure 1", - "1"), + "1" + ), EvaluatorTestCase( "EXEC two_arg_procedure 1, 2", - "'1 2'"), + "'1 2'" + ), EvaluatorTestCase( "EXEC output_binding 'A'", - "[{'id':1}]"), + "[{'id':1}]" + ), EvaluatorTestCase( "EXEC output_binding 'B'", - "MISSING")) + "MISSING" + ) + ) } @ParameterizedTest @ArgumentsSource(ArgsProviderValid::class) fun validTests(tc: EvaluatorTestCase) = runSProcTestCase(tc, session) - private class ArgsProviderError : ArgumentsProviderBase() { override fun getParameters(): List = listOf( // call function that is not a stored procedure @@ -210,7 +223,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, - Property.PROCEDURE_NAME to "utcnow")), + Property.PROCEDURE_NAME to "utcnow" + ) + ), // call function that is not a stored procedure, w/ args EvaluatorErrorTestCase( "EXEC substring 0, 1, 'foo'", @@ -218,7 +233,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { mapOf( Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, - Property.PROCEDURE_NAME to "substring")), + Property.PROCEDURE_NAME to "substring" + ) + ), // invalid # args to sproc (too many) EvaluatorErrorTestCase( "EXEC zero_arg_procedure 1", @@ -227,7 +244,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.EXPECTED_ARITY_MIN to 0, - Property.EXPECTED_ARITY_MAX to 0)), + Property.EXPECTED_ARITY_MAX to 0 + ) + ), // invalid # args to sproc (too many) EvaluatorErrorTestCase( "EXEC two_arg_procedure 1, 2, 3", @@ -236,7 +255,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.EXPECTED_ARITY_MIN to 2, - Property.EXPECTED_ARITY_MAX to 2)), + Property.EXPECTED_ARITY_MAX to 2 + ) + ), // invalid # args to sproc (too few) EvaluatorErrorTestCase( "EXEC one_arg_procedure", @@ -245,7 +266,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { Property.LINE_NUMBER to 1L, Property.COLUMN_NUMBER to 6L, Property.EXPECTED_ARITY_MIN to 1, - Property.EXPECTED_ARITY_MAX to 1)), + Property.EXPECTED_ARITY_MAX to 1 + ) + ), // invalid first arg type EvaluatorErrorTestCase( "EXEC one_arg_procedure 'foo'", @@ -255,7 +278,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { Property.COLUMN_NUMBER to 6L, Property.EXPECTED_ARGUMENT_TYPES to "INT", Property.ACTUAL_ARGUMENT_TYPES to "STRING", - Property.FUNCTION_NAME to "one_arg_procedure")), + Property.FUNCTION_NAME to "one_arg_procedure" + ) + ), // invalid second arg type EvaluatorErrorTestCase( "EXEC two_arg_procedure 1, 'two'", @@ -265,7 +290,9 @@ class EvaluatingCompilerExecTest : EvaluatorTestBase() { Property.COLUMN_NUMBER to 6L, Property.EXPECTED_ARGUMENT_TYPES to "INT", Property.ACTUAL_ARGUMENT_TYPES to "STRING", - Property.FUNCTION_NAME to "two_arg_procedure")) + 
Property.FUNCTION_NAME to "two_arg_procedure" + ) + ) ) } diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromLetTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromLetTests.kt index 33c606c6bf..a33d42cca6 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromLetTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromLetTests.kt @@ -1,6 +1,5 @@ package org.partiql.lang.eval -import org.junit.Test import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.ArgumentsSource import org.partiql.lang.errors.ErrorCode @@ -10,66 +9,81 @@ import org.partiql.lang.util.to class EvaluatingCompilerFromLetTests : EvaluatorTestBase() { - private val session = mapOf("A" to "[ { id : 1 } ]", + private val session = mapOf( + "A" to "[ { id : 1 } ]", "B" to "[ { id : 100 }, { id : 200 } ]", "C" to """[ { name: 'foo', region: 'NA' }, { name: 'foobar', region: 'EU' }, - { name: 'foobarbaz', region: 'NA' } ]""").toSession() + { name: 'foobarbaz', region: 'NA' } ]""" + ).toSession() class ArgsProviderValid : ArgumentsProviderBase() { override fun getParameters(): List = listOf( // LET used in WHERE EvaluatorTestCase( "SELECT * FROM A LET 1 AS X WHERE X = 1", - """<< {'id': 1} >>"""), + """<< {'id': 1} >>""" + ), // LET used in SELECT EvaluatorTestCase( "SELECT X FROM A LET 1 AS X", - """<< {'X': 1} >>"""), + """<< {'X': 1} >>""" + ), // LET used in GROUP BY EvaluatorTestCase( "SELECT * FROM C LET region AS X GROUP BY X", - """<< {'X': `EU`}, {'X': `NA`} >>"""), + """<< {'X': `EU`}, {'X': `NA`} >>""" + ), // LET used in projection after GROUP BY EvaluatorTestCase( "SELECT foo FROM B LET 100 AS foo GROUP BY B.id, foo", - """<< {'foo': 100}, {'foo': 100} >>"""), + """<< {'foo': 100}, {'foo': 100} >>""" + ), // LET used in HAVING after GROUP BY EvaluatorTestCase( "SELECT B.id FROM B LET 100 AS foo GROUP BY B.id, foo HAVING B.id > foo", - """<< {'id': 200} >>"""), + """<< {'id': 200} >>""" + ), // LET shadowed binding EvaluatorTestCase( "SELECT X FROM A LET 1 AS X, 2 AS X", - """<< {'X': 2} >>"""), + """<< {'X': 2} >>""" + ), // LET shadowing FROM binding EvaluatorTestCase( "SELECT * FROM A LET 100 AS A", - """<< {'_1': 100} >>"""), + """<< {'_1': 100} >>""" + ), // LET using other variables EvaluatorTestCase( "SELECT X, Y FROM A LET 1 AS X, X + 1 AS Y", - """<< {'X': 1, 'Y': 2} >>"""), + """<< {'X': 1, 'Y': 2} >>""" + ), // LET recursive binding EvaluatorTestCase( "SELECT X FROM A LET 1 AS X, X AS X", - """<< {'X': 1} >>"""), + """<< {'X': 1} >>""" + ), // LET calling function EvaluatorTestCase( "SELECT X FROM A LET upper('foo') AS X", - """<< {'X': 'FOO'} >>"""), + """<< {'X': 'FOO'} >>""" + ), // LET calling function on each row EvaluatorTestCase( "SELECT nameLength FROM C LET char_length(C.name) AS nameLength", - """<< {'nameLength': 3}, {'nameLength': 6}, {'nameLength': 9} >>"""), + """<< {'nameLength': 3}, {'nameLength': 6}, {'nameLength': 9} >>""" + ), // LET calling function with GROUP BY and aggregation EvaluatorTestCase( "SELECT C.region, MAX(nameLength) AS maxLen FROM C LET char_length(C.name) AS nameLength GROUP BY C.region", - """<< {'region': `EU`, 'maxLen': 6}, {'region': `NA`, 'maxLen': 9} >>"""), + """<< {'region': `EU`, 'maxLen': 6}, {'region': `NA`, 'maxLen': 9} >>""" + ), // LET outer query has correct value EvaluatorTestCase( "SELECT X FROM (SELECT VALUE X FROM A LET 1 AS X) LET 2 AS X", - """<< {'X': 2} >>""") + """<< {'X': 2} >>""" + ) ) } @@ -84,9 +98,9 @@ class EvaluatingCompilerFromLetTests : 
EvaluatorTestBase() { "SELECT X FROM A LET Y AS X", ErrorCode.EVALUATOR_BINDING_DOES_NOT_EXIST, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 21L, - Property.BINDING_NAME to "Y" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 21L, + Property.BINDING_NAME to "Y" ), expectedPermissiveModeResult = "<<{}>>" ), @@ -95,9 +109,9 @@ class EvaluatingCompilerFromLetTests : EvaluatorTestBase() { "SELECT X FROM A LET 1 AS X, Y AS Z, 3 AS Y", ErrorCode.EVALUATOR_BINDING_DOES_NOT_EXIST, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 29L, - Property.BINDING_NAME to "Y" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 29L, + Property.BINDING_NAME to "Y" ), expectedPermissiveModeResult = "<<{'X': 1}>>" ), @@ -107,9 +121,9 @@ class EvaluatingCompilerFromLetTests : EvaluatorTestBase() { "SELECT X FROM (SELECT VALUE X FROM A LET 1 AS X)", ErrorCode.EVALUATOR_BINDING_DOES_NOT_EXIST, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 8L, - Property.BINDING_NAME to "X" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 8L, + Property.BINDING_NAME to "X" ), expectedPermissiveModeResult = "<<{}>>" ), @@ -118,9 +132,9 @@ class EvaluatingCompilerFromLetTests : EvaluatorTestBase() { "SELECT Z FROM A LET (SELECT 1 FROM A LET 1 AS X) AS Y, X AS Z", ErrorCode.EVALUATOR_BINDING_DOES_NOT_EXIST, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 56L, - Property.BINDING_NAME to "X" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 56L, + Property.BINDING_NAME to "X" ), expectedPermissiveModeResult = "<<{}>>" ), @@ -129,9 +143,9 @@ class EvaluatingCompilerFromLetTests : EvaluatorTestBase() { "SELECT B.id FROM B LET 100 AS foo GROUP BY B.id HAVING B.id > foo", ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 63L, - Property.BINDING_NAME to "foo" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 63L, + Property.BINDING_NAME to "foo" ) ), // LET binding referenced in projection not in GROUP BY @@ -139,9 +153,9 @@ class EvaluatingCompilerFromLetTests : EvaluatorTestBase() { "SELECT foo FROM B LET 100 AS foo GROUP BY B.id", ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 8L, - Property.BINDING_NAME to "foo" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 8L, + Property.BINDING_NAME to "foo" ) ) ) diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromSourceByTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromSourceByTests.kt index a9c1915a72..39da9873f6 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromSourceByTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerFromSourceByTests.kt @@ -5,7 +5,7 @@ import org.junit.Test class EvaluatingCompilerFromSourceByTests : EvaluatorTestBase() { - class AddressedExprValue(val value: Long, val valueFactory: ExprValueFactory): BaseExprValue(), Scalar, Addressed { + class AddressedExprValue(val value: Long, val valueFactory: ExprValueFactory) : BaseExprValue(), Scalar, Addressed { override val ionValue: IonValue get() = valueFactory.ion.newInt(value) @@ -23,16 +23,26 @@ class EvaluatingCompilerFromSourceByTests : EvaluatorTestBase() { } val session = EvaluationSession.build { - globals(Bindings.ofMap( - mapOf( - "someList" to valueFactory.newList(sequenceOf( - AddressedExprValue(1, valueFactory), - AddressedExprValue(2, valueFactory), - AddressedExprValue(3, valueFactory))), - 
"someBag" to valueFactory.newBag(sequenceOf( - AddressedExprValue(11, valueFactory), - AddressedExprValue(12, valueFactory), - AddressedExprValue(13, valueFactory)))))) + globals( + Bindings.ofMap( + mapOf( + "someList" to valueFactory.newList( + sequenceOf( + AddressedExprValue(1, valueFactory), + AddressedExprValue(2, valueFactory), + AddressedExprValue(3, valueFactory) + ) + ), + "someBag" to valueFactory.newBag( + sequenceOf( + AddressedExprValue(11, valueFactory), + AddressedExprValue(12, valueFactory), + AddressedExprValue(13, valueFactory) + ) + ) + ) + ) + ) } @Test @@ -77,4 +87,4 @@ class EvaluatingCompilerFromSourceByTests : EvaluatorTestBase() { """[[null, null, 1001], [null, null, 1002], [null, null, 1003]]""", session ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerGroupByTest.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerGroupByTest.kt index 0aa16257ad..3bf44d4bbd 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerGroupByTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerGroupByTest.kt @@ -22,7 +22,7 @@ import org.partiql.lang.errors.Property class EvaluatingCompilerGroupByTest : EvaluatorTestBase() { - private val session = mapOf( + private val session = mapOf( "simple_1_col_1_group" to "[{col1: 1}, {col1: 1}]", "simple_1_col_2_groups" to "[{col1: 1}, {col1: 2}, {col1: 1}, {col1: 2}]", "simple_2_col_1_group" to "[{col1: 1, col2: 10}, {col1: 1, col2: 10}]", @@ -83,13 +83,14 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { { name: 'Chandler', age: 27, manager: { name: 'Rocky', address: { city: 'Seattle' } } }, { name: 'Ross', age: 22, manager: { 'name': 'Alex', address: { city: 'Chicago' } } } ]""" - ).toSession() + ).toSession() companion object { private class SqlTemplate( val sql: String, - val compilationOptions: List = CompOptions.values().toList()) + val compilationOptions: List = CompOptions.values().toList() + ) /** * Creates one [EvaluatorTestCase] for each of the specified `expectedResultFor*` arguments and @@ -118,7 +119,7 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { } } - if(cases.size == 0) { + if (cases.size == 0) { fail("At least one expected result must be specified.") } return cases @@ -140,13 +141,14 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { expectedResultForSum, expectedResultForMin, expectedResultForMax, - expectedResultForAvg) + expectedResultForAvg + ) private fun createGroupByTestCases( query: String, expected: String, compilationOptions: List = CompOptions.values().toList() - ) = compilationOptions.map { co -> EvaluatorTestCase(query, expected, co)} + ) = compilationOptions.map { co -> EvaluatorTestCase(query, expected, co) } private fun createGroupByTestCases(queries: List, expected: String) = queries.flatMap { q -> @@ -164,159 +166,229 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { // GROUP BY over empty createGroupByTestCases( - queries = listOf("SELECT * FROM [] GROUP BY doesntMatterWontBeEvaluated", - "SELECT VALUE { } FROM [] GROUP BY doesntMatterWontBeEvaluated "), - expected = "<< >>") + - - createGroupByTestCases( - query = "SELECT * FROM simple_1_col_1_group GROUP BY col1", - expected = "<<{'col1': 1 }>>") + - - createGroupByTestCases( - query = "SELECT * FROM simple_2_col_1_group GROUP BY col1", - expected = "<<{'col1': 1 }>>") + - createGroupByTestCases( - query = "SELECT * FROM simple_2_col_1_group GROUP BY col2", - expected = "<<{'col2': 10 }>>") + - - createGroupByTestCases( - queries = listOf("SELECT col1 FROM 
simple_1_col_1_group GROUP BY col1", - "SELECT VALUE { 'col1': 1 } FROM simple_1_col_1_group GROUP BY col1"), - expected = "<<{'col1': 1 }>>") + - createGroupByTestCases( - queries = listOf("SELECT col1 FROM simple_2_col_1_group GROUP BY col1", - "SELECT VALUE { 'col1': 1 } FROM simple_2_col_1_group GROUP BY col1"), - expected = "<<{'col1': 1 }>>") + - createGroupByTestCases( - queries = listOf("SELECT col2 FROM simple_2_col_1_group GROUP BY col2", - "SELECT VALUE { 'col2': col2 } FROM simple_2_col_1_group GROUP BY col2"), - expected = "<<{'col2': 10 }>>") + - - createGroupByTestCases( - query = "SELECT * FROM simple_1_col_2_groups GROUP BY col1", - expected = "<<{'col1': 1 }, {'col1': 2 }>>") + - createGroupByTestCases( - query = "SELECT * FROM simple_2_col_2_groups GROUP BY col1", - expected = "<<{'col1': 1 }, {'col1': 11 }>>") + - createGroupByTestCases( - query = "SELECT * FROM simple_2_col_2_groups GROUP BY col2", - expected = "<<{'col2': 10 }, {'col2': 110 }>>") + - - createGroupByTestCases( - queries = listOf("SELECT col1 FROM simple_1_col_2_groups GROUP BY col1", - "SELECT VALUE { 'col1': col1 } FROM simple_1_col_2_groups GROUP BY col1"), - expected = "<<{'col1': 1 }, {'col1': 2}>>") + - createGroupByTestCases( - queries = listOf("SELECT col1 FROM simple_2_col_2_groups GROUP BY col1", - "SELECT VALUE { 'col1': col1 } FROM simple_2_col_2_groups GROUP BY col1"), - expected = "<<{'col1': 1 }, {'col1': 11}>>") + - createGroupByTestCases( - queries = listOf("SELECT col2 FROM simple_2_col_2_groups GROUP BY col2", - "SELECT VALUE { 'col2': col2 } FROM simple_2_col_2_groups GROUP BY col2"), - expected = "<<{'col2': 10 }, { 'col2': 110}>>") + - - // GROUP BY other expressions - createGroupByTestCases( - queries = listOf("SELECT * FROM simple_1_col_1_group GROUP BY col1 + 1", - "SELECT _1 FROM simple_1_col_1_group GROUP BY col1 + 1", - "SELECT VALUE { '_1': _1 } FROM simple_1_col_1_group GROUP BY col1 + 1"), - expected = "<< { '_1': 2 } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM string_groups GROUP BY col1 || 'a'", - "SELECT _1 FROM string_groups GROUP BY col1 || 'a'", - "SELECT VALUE { '_1': _1 } FROM string_groups GROUP BY col1 || 'a'"), - expected = "<< { '_1': 'aa' } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM string_numbers GROUP BY CAST(num AS INT)", - "SELECT num FROM string_numbers GROUP BY CAST(num AS INT)", - "SELECT VALUE { 'num': num } FROM string_numbers GROUP BY CAST(num AS INT)"), - expected = "<< { 'num': 1 }, { 'num': 2 } >>") + - - createGroupByTestCases( - queries = listOf("SELECT * FROM simple_1_col_1_group GROUP BY col1 + 1 AS someGBE", - "SELECT someGBE FROM simple_1_col_1_group GROUP BY col1 + 1 AS someGBE", - "SELECT VALUE { 'someGBE': someGBE } FROM simple_1_col_1_group GROUP BY col1 + 1 AS someGBE"), - expected = "<< { 'someGBE': 2 } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM string_groups GROUP BY col1 || 'a' AS someGBE", - "SELECT someGBE FROM string_groups GROUP BY col1 || 'a' AS someGBE", - "SELECT VALUE { 'someGBE': someGBE } FROM string_groups GROUP BY col1 || 'a' AS someGBE"), - expected = "<< { 'someGBE': 'aa' } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM string_numbers GROUP BY CAST(num AS INT) AS someGBE", - "SELECT someGBE FROM string_numbers GROUP BY CAST(num AS INT) AS someGBE", - "SELECT VALUE { 'someGBE': someGBE } FROM string_numbers GROUP BY CAST(num AS INT) AS someGBE"), - expected = "<< { 'someGBE': 1 }, { 'someGBE': 2 } >>") + + queries = listOf( + 
"SELECT * FROM [] GROUP BY doesntMatterWontBeEvaluated", + "SELECT VALUE { } FROM [] GROUP BY doesntMatterWontBeEvaluated " + ), + expected = "<< >>" + ) + - // GROUP BY NULL/MISSING cases - createGroupByTestCases( - queries = listOf("SELECT * FROM simple_1_col_1_group GROUP BY NULL AS someNull", - "SELECT someNull FROM simple_1_col_1_group GROUP BY NULL AS someNull", - "SELECT VALUE { 'someNull': someNull } FROM simple_1_col_1_group GROUP BY NULL AS someNull"), - expected = "<< { 'someNull': null } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM simple_1_col_1_group GROUP BY MISSING AS someMissing", - "SELECT someMissing FROM simple_1_col_1_group GROUP BY MISSING AS someMissing", - "SELECT VALUE { 'someMissing': someMissing } FROM simple_1_col_1_group GROUP BY MISSING AS someMissing"), - // must explicitly specify MISSING here because https://github.com/partiql/partiql-lang-kotlin/issues/36 - expected = "<< { 'someMissing': MISSING } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM simple_1_col_1_group GROUP BY NULL AS groupExp", - "SELECT groupExp FROM simple_1_col_1_group GROUP BY NULL AS groupExp", - "SELECT VALUE { 'groupExp': groupExp } FROM simple_1_col_1_group GROUP BY NULL AS groupExp"), - expected = "<< { 'groupExp': null } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM simple_1_col_1_group GROUP BY MISSING AS groupExp", - "SELECT groupExp FROM simple_1_col_1_group GROUP BY MISSING AS groupExp", - "SELECT VALUE { 'groupExp': groupExp } FROM simple_1_col_1_group GROUP BY MISSING AS groupExp"), - expected = "<< { 'groupExp': MISSING } >>") + - createGroupByTestCases( - queries = listOf("SELECT * FROM products_sparse p GROUP BY p.supplierId_nulls", - "SELECT supplierId_nulls FROM products_sparse p GROUP BY p.supplierId_nulls", - "SELECT VALUE { 'supplierId_nulls': supplierId_nulls } FROM products_sparse p GROUP BY p.supplierId_nulls"), - expected = """<< + createGroupByTestCases( + query = "SELECT * FROM simple_1_col_1_group GROUP BY col1", + expected = "<<{'col1': 1 }>>" + ) + + + createGroupByTestCases( + query = "SELECT * FROM simple_2_col_1_group GROUP BY col1", + expected = "<<{'col1': 1 }>>" + ) + + createGroupByTestCases( + query = "SELECT * FROM simple_2_col_1_group GROUP BY col2", + expected = "<<{'col2': 10 }>>" + ) + + + createGroupByTestCases( + queries = listOf( + "SELECT col1 FROM simple_1_col_1_group GROUP BY col1", + "SELECT VALUE { 'col1': 1 } FROM simple_1_col_1_group GROUP BY col1" + ), + expected = "<<{'col1': 1 }>>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT col1 FROM simple_2_col_1_group GROUP BY col1", + "SELECT VALUE { 'col1': 1 } FROM simple_2_col_1_group GROUP BY col1" + ), + expected = "<<{'col1': 1 }>>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT col2 FROM simple_2_col_1_group GROUP BY col2", + "SELECT VALUE { 'col2': col2 } FROM simple_2_col_1_group GROUP BY col2" + ), + expected = "<<{'col2': 10 }>>" + ) + + + createGroupByTestCases( + query = "SELECT * FROM simple_1_col_2_groups GROUP BY col1", + expected = "<<{'col1': 1 }, {'col1': 2 }>>" + ) + + createGroupByTestCases( + query = "SELECT * FROM simple_2_col_2_groups GROUP BY col1", + expected = "<<{'col1': 1 }, {'col1': 11 }>>" + ) + + createGroupByTestCases( + query = "SELECT * FROM simple_2_col_2_groups GROUP BY col2", + expected = "<<{'col2': 10 }, {'col2': 110 }>>" + ) + + + createGroupByTestCases( + queries = listOf( + "SELECT col1 FROM simple_1_col_2_groups GROUP BY col1", + "SELECT VALUE { 'col1': col1 
} FROM simple_1_col_2_groups GROUP BY col1" + ), + expected = "<<{'col1': 1 }, {'col1': 2}>>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT col1 FROM simple_2_col_2_groups GROUP BY col1", + "SELECT VALUE { 'col1': col1 } FROM simple_2_col_2_groups GROUP BY col1" + ), + expected = "<<{'col1': 1 }, {'col1': 11}>>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT col2 FROM simple_2_col_2_groups GROUP BY col2", + "SELECT VALUE { 'col2': col2 } FROM simple_2_col_2_groups GROUP BY col2" + ), + expected = "<<{'col2': 10 }, { 'col2': 110}>>" + ) + + + // GROUP BY other expressions + createGroupByTestCases( + queries = listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY col1 + 1", + "SELECT _1 FROM simple_1_col_1_group GROUP BY col1 + 1", + "SELECT VALUE { '_1': _1 } FROM simple_1_col_1_group GROUP BY col1 + 1" + ), + expected = "<< { '_1': 2 } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM string_groups GROUP BY col1 || 'a'", + "SELECT _1 FROM string_groups GROUP BY col1 || 'a'", + "SELECT VALUE { '_1': _1 } FROM string_groups GROUP BY col1 || 'a'" + ), + expected = "<< { '_1': 'aa' } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM string_numbers GROUP BY CAST(num AS INT)", + "SELECT num FROM string_numbers GROUP BY CAST(num AS INT)", + "SELECT VALUE { 'num': num } FROM string_numbers GROUP BY CAST(num AS INT)" + ), + expected = "<< { 'num': 1 }, { 'num': 2 } >>" + ) + + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY col1 + 1 AS someGBE", + "SELECT someGBE FROM simple_1_col_1_group GROUP BY col1 + 1 AS someGBE", + "SELECT VALUE { 'someGBE': someGBE } FROM simple_1_col_1_group GROUP BY col1 + 1 AS someGBE" + ), + expected = "<< { 'someGBE': 2 } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM string_groups GROUP BY col1 || 'a' AS someGBE", + "SELECT someGBE FROM string_groups GROUP BY col1 || 'a' AS someGBE", + "SELECT VALUE { 'someGBE': someGBE } FROM string_groups GROUP BY col1 || 'a' AS someGBE" + ), + expected = "<< { 'someGBE': 'aa' } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM string_numbers GROUP BY CAST(num AS INT) AS someGBE", + "SELECT someGBE FROM string_numbers GROUP BY CAST(num AS INT) AS someGBE", + "SELECT VALUE { 'someGBE': someGBE } FROM string_numbers GROUP BY CAST(num AS INT) AS someGBE" + ), + expected = "<< { 'someGBE': 1 }, { 'someGBE': 2 } >>" + ) + + + // GROUP BY NULL/MISSING cases + createGroupByTestCases( + queries = listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY NULL AS someNull", + "SELECT someNull FROM simple_1_col_1_group GROUP BY NULL AS someNull", + "SELECT VALUE { 'someNull': someNull } FROM simple_1_col_1_group GROUP BY NULL AS someNull" + ), + expected = "<< { 'someNull': null } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY MISSING AS someMissing", + "SELECT someMissing FROM simple_1_col_1_group GROUP BY MISSING AS someMissing", + "SELECT VALUE { 'someMissing': someMissing } FROM simple_1_col_1_group GROUP BY MISSING AS someMissing" + ), + // must explicitly specify MISSING here because https://github.com/partiql/partiql-lang-kotlin/issues/36 + expected = "<< { 'someMissing': MISSING } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY NULL AS groupExp", + "SELECT groupExp FROM simple_1_col_1_group GROUP BY NULL AS groupExp", + "SELECT VALUE { 'groupExp': groupExp } 
FROM simple_1_col_1_group GROUP BY NULL AS groupExp" + ), + expected = "<< { 'groupExp': null } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY MISSING AS groupExp", + "SELECT groupExp FROM simple_1_col_1_group GROUP BY MISSING AS groupExp", + "SELECT VALUE { 'groupExp': groupExp } FROM simple_1_col_1_group GROUP BY MISSING AS groupExp" + ), + expected = "<< { 'groupExp': MISSING } >>" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM products_sparse p GROUP BY p.supplierId_nulls", + "SELECT supplierId_nulls FROM products_sparse p GROUP BY p.supplierId_nulls", + "SELECT VALUE { 'supplierId_nulls': supplierId_nulls } FROM products_sparse p GROUP BY p.supplierId_nulls" + ), + expected = """<< { 'supplierId_nulls': 10 }, { 'supplierId_nulls': 11 }, { 'supplierId_nulls': null } - >>""") + - createGroupByTestCases( - queries = listOf("SELECT * FROM products_sparse p GROUP BY p.supplierId_missings", - "SELECT p.supplierId_missings FROM products_sparse p GROUP BY p.supplierId_missings", - "SELECT VALUE { 'supplierId_missings' : p.supplierId_missings } FROM products_sparse p GROUP BY p.supplierId_missings"), - expected = """<< + >>""" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM products_sparse p GROUP BY p.supplierId_missings", + "SELECT p.supplierId_missings FROM products_sparse p GROUP BY p.supplierId_missings", + "SELECT VALUE { 'supplierId_missings' : p.supplierId_missings } FROM products_sparse p GROUP BY p.supplierId_missings" + ), + expected = """<< { 'supplierId_missings': 10 }, { 'supplierId_missings': 11 }, --must explicitly include the missing value here because of https://github.com/partiql/partiql-lang-kotlin/issues/36 { 'supplierId_missings': missing } >>""" - ) + - createGroupByTestCases( - queries = listOf("SELECT * FROM products_sparse p GROUP BY p.supplierId_mixed", - "SELECT p.supplierId_mixed FROM products_sparse p GROUP BY p.supplierId_mixed", - "SELECT VALUE { 'supplierId_mixed' : p.supplierId_mixed } FROM products_sparse p GROUP BY p.supplierId_mixed"), - expected = """<< + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM products_sparse p GROUP BY p.supplierId_mixed", + "SELECT p.supplierId_mixed FROM products_sparse p GROUP BY p.supplierId_mixed", + "SELECT VALUE { 'supplierId_mixed' : p.supplierId_mixed } FROM products_sparse p GROUP BY p.supplierId_mixed" + ), + expected = """<< { 'supplierId_mixed': 10 }, { 'supplierId_mixed': 11 }, --must explicitly include the missing value here because of https://github.com/partiql/partiql-lang-kotlin/issues/363 and https://github.com/partiql/partiql-lang-kotlin/issues/35 { 'supplierId_mixed': missing } - >>""") + - createGroupByTestCases( - queries = listOf("SELECT * FROM products_sparse p GROUP BY p.regionId, p.supplierId_nulls", - "SELECT regionId, supplierId_nulls FROM products_sparse p GROUP BY p.regionId, p.supplierId_nulls", - "SELECT VALUE { 'regionId': regionId, 'supplierId_nulls': supplierId_nulls } FROM products_sparse p GROUP BY p.regionId, p.supplierId_nulls"), - expected = """<< + >>""" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM products_sparse p GROUP BY p.regionId, p.supplierId_nulls", + "SELECT regionId, supplierId_nulls FROM products_sparse p GROUP BY p.regionId, p.supplierId_nulls", + "SELECT VALUE { 'regionId': regionId, 'supplierId_nulls': supplierId_nulls } FROM products_sparse p GROUP BY p.regionId, p.supplierId_nulls" + ), + expected = """<< { 'regionId': 100, 
'supplierId_nulls': 10 }, { 'regionId': 100, 'supplierId_nulls': 11 }, { 'regionId': 100, 'supplierId_nulls': null }, { 'regionId': 200, 'supplierId_nulls': 10 }, { 'regionId': 200, 'supplierId_nulls': 11 }, { 'regionId': 200, 'supplierId_nulls': null } - >>""") + - createGroupByTestCases( - queries = listOf("SELECT * FROM products_sparse p GROUP BY p.regionId, p.supplierId_missings", - "SELECT p.regionId, p.supplierId_missings FROM products_sparse p GROUP BY p.regionId, p.supplierId_missings", - "SELECT VALUE { 'regionId': p.regionId, 'supplierId_missings': p.supplierId_missings } FROM products_sparse p GROUP BY p.regionId, p.supplierId_missings"), - expected = """<< + >>""" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM products_sparse p GROUP BY p.regionId, p.supplierId_missings", + "SELECT p.regionId, p.supplierId_missings FROM products_sparse p GROUP BY p.regionId, p.supplierId_missings", + "SELECT VALUE { 'regionId': p.regionId, 'supplierId_missings': p.supplierId_missings } FROM products_sparse p GROUP BY p.regionId, p.supplierId_missings" + ), + expected = """<< --must explicitly include the missing values here because of https://github.com/partiql/partiql-lang-kotlin/issues/36 { 'regionId': 100, 'supplierId_missings': 10 }, { 'regionId': 100, 'supplierId_missings': 11 }, @@ -324,12 +396,15 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { { 'regionId': 200, 'supplierId_missings': 10 }, { 'regionId': 200, 'supplierId_missings': 11 }, { 'regionId': 200, 'supplierId_missings': missing } - >>""") + - createGroupByTestCases( - queries = listOf("SELECT * FROM products_sparse p GROUP BY p.regionId, p.supplierId_mixed", - "SELECT regionId, p.supplierId_mixed FROM products_sparse p GROUP BY p.regionId, p.supplierId_mixed", - "SELECT VALUE { 'regionId': p.regionId, 'supplierId_mixed': p.supplierId_mixed } FROM products_sparse p GROUP BY p.regionId, p.supplierId_mixed"), - expected = """<< + >>""" + ) + + createGroupByTestCases( + queries = listOf( + "SELECT * FROM products_sparse p GROUP BY p.regionId, p.supplierId_mixed", + "SELECT regionId, p.supplierId_mixed FROM products_sparse p GROUP BY p.regionId, p.supplierId_mixed", + "SELECT VALUE { 'regionId': p.regionId, 'supplierId_mixed': p.supplierId_mixed } FROM products_sparse p GROUP BY p.regionId, p.supplierId_mixed" + ), + expected = """<< --must explicitly include the missing values here because of https://github.com/partiql/partiql-lang-kotlin/issues/36 { 'regionId': 100, 'supplierId_mixed': 10 }, { 'regionId': 100, 'supplierId_mixed': 11 }, @@ -342,7 +417,8 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { { 'regionId': 200, 'supplierId_mixed': 11 }, --for this group, the null value encountered first { 'regionId': 200, 'supplierId_mixed': null } - >>""") + >>""" + ) @Test @Parameters @@ -356,317 +432,339 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { groupName = "literal argument", sqlStrings = listOf("SELECT {{agg}}(5) FROM products"), expectedResultForCount = "<< { '_1': 5 } >>", - expectedResultForSum = "<< { '_1': 25 } >>", - expectedResultForMin = "<< { '_1': 5 } >>", - expectedResultForMax = "<< { '_1': 5 } >>", - expectedResultForAvg = "<< { '_1': 5 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "variable argument", - sqlStrings = listOf("SELECT {{agg}}(numInStock) AS agg FROM products", - "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p"), - - expectedResultForCount = "<< { 'agg': 5 } >>", - expectedResultForSum = "<< { 'agg': 11111 } >>", - 
expectedResultForMin = "<< { 'agg': 1 } >>", - expectedResultForMax = "<< { 'agg': 10000 } >>", - expectedResultForAvg = "<< { 'agg': 2222.2 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "binary expression argument", - sqlStrings = listOf("SELECT {{agg}}( numInStock + 1) AS agg FROM products", - "SELECT {{agg}}(p.numInStock + 1) AS agg FROM products as p"), - - expectedResultForCount = "<< { 'agg': 5 } >>", - expectedResultForSum = "<< { 'agg': 11116 } >>", - expectedResultForMin = "<< { 'agg': 2 } >>", - expectedResultForMax = "<< { 'agg': 10001 } >>", - expectedResultForAvg = "<< { 'agg': 2223.2 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "as part of binary expression", - sqlStrings = listOf("SELECT {{agg}}( numInStock) + 2 AS agg FROM products", - "SELECT {{agg}}(p.numInStock) + 2 AS agg FROM products as p"), - - expectedResultForCount = "<< { 'agg': 7 } >>", - expectedResultForSum = "<< { 'agg': 11113 } >>", - expectedResultForMin = "<< { 'agg': 3 } >>", - expectedResultForMax = "<< { 'agg': 10002 } >>", - expectedResultForAvg = "<< { 'agg': 2224.2 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "variable or path argument and WHERE clause (1)", - sqlStrings = listOf("SELECT {{agg}}(numInStock) AS agg FROM products WHERE supplierId = 10", - "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE supplierId = 10"), - - expectedResultForCount = "<< { 'agg': 3 } >>", - expectedResultForSum = "<< { 'agg': 111 } >>", - expectedResultForMin = "<< { 'agg': 1 } >>", - expectedResultForMax = "<< { 'agg': 100 } >>", - expectedResultForAvg = "<< { 'agg': 37 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "variable or path argument and WHERE clause (2)", - sqlStrings = listOf("SELECT {{agg}}( numInStock) AS agg FROM products WHERE supplierId = 11", - "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE supplierId = 11"), - - expectedResultForCount = "<< { 'agg': 2 } >>", - expectedResultForSum = "<< { 'agg': 11000 } >>", - expectedResultForMin = "<< { 'agg': 1000 } >>", - expectedResultForMax = "<< { 'agg': 10000 } >>", - expectedResultForAvg = "<< { 'agg': 5500 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "variable or path argument and WHERE clause (3)", - sqlStrings = listOf("SELECT {{agg}}( numInStock) AS agg FROM products WHERE categoryId = 20", - "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE p.categoryId = 20"), - - expectedResultForCount = "<< { 'agg': 2 } >>", - expectedResultForSum = "<< { 'agg': 11 } >>", - expectedResultForMin = "<< { 'agg': 1 } >>", - expectedResultForMax = "<< { 'agg': 10 } >>", - expectedResultForAvg = "<< { 'agg': 5.5 } >>" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "variable or path argument and WHERE clause (4)", - sqlStrings = listOf("SELECT {{agg}}( numInStock) AS agg FROM products WHERE categoryId = 21", - "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE categoryId = 21"), - - expectedResultForCount = "<< { 'agg': 3 } >>", - expectedResultForSum = "<< { 'agg': 11100 } >>", - expectedResultForMin = "<< { 'agg': 100 } >>", - expectedResultForMax = "<< { 'agg': 10000 } >>", - expectedResultForAvg = "<< { 'agg': 3700 } >>" + expectedResultForSum = "<< { '_1': 25 } >>", + expectedResultForMin = "<< { '_1': 5 } >>", + expectedResultForMax = "<< { '_1': 5 } >>", + expectedResultForAvg = "<< { '_1': 5 } >>" ) + - createAggregateTestCasesFromSqlStrings( - groupName = "GROUP BY (1 column) (#1)", - 
sqlStrings = listOf("SELECT supplierId, {{agg}}( numInStock) AS agg FROM products GROUP BY supplierId", - "SELECT supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId", - "SELECT p.supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId"), - - expectedResultForCount = """<< + createAggregateTestCasesFromSqlStrings( + groupName = "variable argument", + sqlStrings = listOf( + "SELECT {{agg}}(numInStock) AS agg FROM products", + "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p" + ), + + expectedResultForCount = "<< { 'agg': 5 } >>", + expectedResultForSum = "<< { 'agg': 11111 } >>", + expectedResultForMin = "<< { 'agg': 1 } >>", + expectedResultForMax = "<< { 'agg': 10000 } >>", + expectedResultForAvg = "<< { 'agg': 2222.2 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "binary expression argument", + sqlStrings = listOf( + "SELECT {{agg}}( numInStock + 1) AS agg FROM products", + "SELECT {{agg}}(p.numInStock + 1) AS agg FROM products as p" + ), + + expectedResultForCount = "<< { 'agg': 5 } >>", + expectedResultForSum = "<< { 'agg': 11116 } >>", + expectedResultForMin = "<< { 'agg': 2 } >>", + expectedResultForMax = "<< { 'agg': 10001 } >>", + expectedResultForAvg = "<< { 'agg': 2223.2 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "as part of binary expression", + sqlStrings = listOf( + "SELECT {{agg}}( numInStock) + 2 AS agg FROM products", + "SELECT {{agg}}(p.numInStock) + 2 AS agg FROM products as p" + ), + + expectedResultForCount = "<< { 'agg': 7 } >>", + expectedResultForSum = "<< { 'agg': 11113 } >>", + expectedResultForMin = "<< { 'agg': 3 } >>", + expectedResultForMax = "<< { 'agg': 10002 } >>", + expectedResultForAvg = "<< { 'agg': 2224.2 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "variable or path argument and WHERE clause (1)", + sqlStrings = listOf( + "SELECT {{agg}}(numInStock) AS agg FROM products WHERE supplierId = 10", + "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE supplierId = 10" + ), + + expectedResultForCount = "<< { 'agg': 3 } >>", + expectedResultForSum = "<< { 'agg': 111 } >>", + expectedResultForMin = "<< { 'agg': 1 } >>", + expectedResultForMax = "<< { 'agg': 100 } >>", + expectedResultForAvg = "<< { 'agg': 37 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "variable or path argument and WHERE clause (2)", + sqlStrings = listOf( + "SELECT {{agg}}( numInStock) AS agg FROM products WHERE supplierId = 11", + "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE supplierId = 11" + ), + + expectedResultForCount = "<< { 'agg': 2 } >>", + expectedResultForSum = "<< { 'agg': 11000 } >>", + expectedResultForMin = "<< { 'agg': 1000 } >>", + expectedResultForMax = "<< { 'agg': 10000 } >>", + expectedResultForAvg = "<< { 'agg': 5500 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "variable or path argument and WHERE clause (3)", + sqlStrings = listOf( + "SELECT {{agg}}( numInStock) AS agg FROM products WHERE categoryId = 20", + "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE p.categoryId = 20" + ), + + expectedResultForCount = "<< { 'agg': 2 } >>", + expectedResultForSum = "<< { 'agg': 11 } >>", + expectedResultForMin = "<< { 'agg': 1 } >>", + expectedResultForMax = "<< { 'agg': 10 } >>", + expectedResultForAvg = "<< { 'agg': 5.5 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "variable or path argument and WHERE clause (4)", + sqlStrings = listOf( + 
"SELECT {{agg}}( numInStock) AS agg FROM products WHERE categoryId = 21", + "SELECT {{agg}}(p.numInStock) AS agg FROM products AS p WHERE categoryId = 21" + ), + + expectedResultForCount = "<< { 'agg': 3 } >>", + expectedResultForSum = "<< { 'agg': 11100 } >>", + expectedResultForMin = "<< { 'agg': 100 } >>", + expectedResultForMax = "<< { 'agg': 10000 } >>", + expectedResultForAvg = "<< { 'agg': 3700 } >>" + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "GROUP BY (1 column) (#1)", + sqlStrings = listOf( + "SELECT supplierId, {{agg}}( numInStock) AS agg FROM products GROUP BY supplierId", + "SELECT supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId", + "SELECT p.supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId" + ), + + expectedResultForCount = """<< { 'supplierId': 10, 'agg': 3 }, { 'supplierId': 11, 'agg': 2 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'supplierId': 10, 'agg': 111 }, { 'supplierId': 11, 'agg': 11000 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'supplierId': 10, 'agg': 1 }, { 'supplierId': 11, 'agg': 1000 } >>""", - expectedResultForMax ="""<< + expectedResultForMax = """<< { 'supplierId': 10, 'agg': 100 }, { 'supplierId': 11, 'agg': 10000 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'supplierId': 10, 'agg': 37 }, { 'supplierId': 11, 'agg': 5500 } >>""" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "GROUP BY (1 column) (#2)", - sqlStrings = listOf("SELECT categoryId, {{agg}}( numInStock) AS agg FROM products GROUP BY categoryId", - "SELECT categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.categoryId", - "SELECT p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.categoryId"), - - expectedResultForCount = """<< + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "GROUP BY (1 column) (#2)", + sqlStrings = listOf( + "SELECT categoryId, {{agg}}( numInStock) AS agg FROM products GROUP BY categoryId", + "SELECT categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.categoryId", + "SELECT p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.categoryId" + ), + + expectedResultForCount = """<< { 'categoryId': 20, 'agg': 2 }, { 'categoryId': 21, 'agg': 3 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'categoryId': 20, 'agg': 11 }, { 'categoryId': 21, 'agg': 11100 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'categoryId': 20, 'agg': 1 }, { 'categoryId': 21, 'agg': 100 } >>""", - expectedResultForMax ="""<< + expectedResultForMax = """<< { 'categoryId': 20, 'agg': 10 }, { 'categoryId': 21, 'agg': 10000 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'categoryId': 20, 'agg': 5.5 }, { 'categoryId': 21, 'agg': 3700 } >>""" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "GROUP BY (1 column) and WHERE (#1)", - sqlStrings = listOf( - "SELECT supplierId, {{agg}}( numInStock) AS agg FROM products WHERE price >= 10 GROUP BY supplierId", - "SELECT supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.supplierId", - "SELECT p.supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.supplierId"), - - expectedResultForCount = """<< + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "GROUP BY (1 column) and WHERE (#1)", + sqlStrings = listOf( + "SELECT supplierId, 
{{agg}}( numInStock) AS agg FROM products WHERE price >= 10 GROUP BY supplierId", + "SELECT supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.supplierId", + "SELECT p.supplierId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.supplierId" + ), + + expectedResultForCount = """<< { 'supplierId': 10, 'agg': 2 }, { 'supplierId': 11, 'agg': 1 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'supplierId': 10, 'agg': 110 }, { 'supplierId': 11, 'agg': 10000 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'supplierId': 10, 'agg': 10 }, { 'supplierId': 11, 'agg': 10000 } >>""", - expectedResultForMax ="""<< + expectedResultForMax = """<< { 'supplierId': 10, 'agg': 100 }, { 'supplierId': 11, 'agg': 10000 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'supplierId': 10, 'agg': 55 }, { 'supplierId': 11, 'agg': 10000 } >>""" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "GROUP BY (1 column) and WHERE (#2)", - sqlStrings = listOf( - "SELECT categoryId, {{agg}}( numInStock) AS agg FROM products WHERE price >= 10 GROUP BY categoryId", - "SELECT categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.categoryId", - "SELECT p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.categoryId"), - - expectedResultForCount = """<< + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "GROUP BY (1 column) and WHERE (#2)", + sqlStrings = listOf( + "SELECT categoryId, {{agg}}( numInStock) AS agg FROM products WHERE price >= 10 GROUP BY categoryId", + "SELECT categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.categoryId", + "SELECT p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE price >= 10 GROUP BY p.categoryId" + ), + + expectedResultForCount = """<< { 'categoryId': 20, 'agg': 1 }, { 'categoryId': 21, 'agg': 2 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'categoryId': 20, 'agg': 10 }, { 'categoryId': 21, 'agg': 10100 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'categoryId': 20, 'agg': 10 }, { 'categoryId': 21, 'agg': 100 } >>""", - expectedResultForMax ="""<< + expectedResultForMax = """<< { 'categoryId': 20, 'agg': 10 }, { 'categoryId': 21, 'agg': 10000 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'categoryId': 20, 'agg': 10 }, { 'categoryId': 21, 'agg': 5050 } >>""" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "GROUP BY (2 columns)", - sqlStrings = listOf( - "SELECT supplierId, categoryId, {{agg}}( numInStock) AS agg FROM products GROUP BY supplierId, categoryId", - "SELECT supplierId, categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId, p.categoryId", - "SELECT p.supplierId, p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId, p.categoryId"), - - expectedResultForCount = """<< + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "GROUP BY (2 columns)", + sqlStrings = listOf( + "SELECT supplierId, categoryId, {{agg}}( numInStock) AS agg FROM products GROUP BY supplierId, categoryId", + "SELECT supplierId, categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId, p.categoryId", + "SELECT p.supplierId, p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p GROUP BY p.supplierId, p.categoryId" + ), + + expectedResultForCount = 
"""<< { 'supplierId': 10, 'categoryId': 20, 'agg': 2 }, { 'supplierId': 10, 'categoryId': 21, 'agg': 1 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 2 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 11 }, { 'supplierId': 10, 'categoryId': 21, 'agg': 100 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 11000 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 1 }, { 'supplierId': 10, 'categoryId': 21, 'agg': 100 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 1000 } >>""", - expectedResultForMax ="""<< + expectedResultForMax = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 10 }, { 'supplierId': 10, 'categoryId': 21, 'agg': 100 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 10000 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 5.5 }, { 'supplierId': 10, 'categoryId': 21, 'agg': 100 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 5500 } >>""" - ) + - createAggregateTestCasesFromSqlStrings( - groupName = "GROUP BY (2 columns) with WHERE", - sqlStrings = listOf( - "SELECT supplierId, categoryId, {{agg}}( numInStock) AS agg FROM products WHERE price < 15 GROUP BY supplierId, categoryId", - "SELECT supplierId, categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE p.price < 15 GROUP BY p.supplierId, p.categoryId", - "SELECT p.supplierId, p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE p.price < 15 GROUP BY p.supplierId, p.categoryId"), - expectedResultForCount = """<< + ) + + createAggregateTestCasesFromSqlStrings( + groupName = "GROUP BY (2 columns) with WHERE", + sqlStrings = listOf( + "SELECT supplierId, categoryId, {{agg}}( numInStock) AS agg FROM products WHERE price < 15 GROUP BY supplierId, categoryId", + "SELECT supplierId, categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE p.price < 15 GROUP BY p.supplierId, p.categoryId", + "SELECT p.supplierId, p.categoryId, {{agg}}(p.numInStock) AS agg FROM products AS p WHERE p.price < 15 GROUP BY p.supplierId, p.categoryId" + ), + expectedResultForCount = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 2 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 1 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 11 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 1000 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 1 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 1000 } >>""", - expectedResultForMax ="""<< + expectedResultForMax = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 10 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 1000 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'supplierId': 10, 'categoryId': 20, 'agg': 5.5 }, { 'supplierId': 11, 'categoryId': 21, 'agg': 1000 } >>""" - ) + - createAggregateTestCasesFromSqlTemplates( - groupName = "null and missing aggregate arguments", - sqlTemplates = listOf( - SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}( price_nulls) AS the_agg FROM products_sparse"), - SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}( price_missings) AS the_agg FROM products_sparse AS p", CompOptions.onlyUndefinedVariableBehaviorMissing), - SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}( price_mixed) AS the_agg FROM products_sparse AS p", CompOptions.onlyUndefinedVariableBehaviorMissing), - - SqlTemplate("SELECT 
COUNT(1) AS the_count, {{agg}}(p.price_nulls) AS the_agg FROM products_sparse AS p"), - SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}(p.price_missings) AS the_agg FROM products_sparse AS p"), - SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}(p.price_mixed) AS the_agg FROM products_sparse AS p") - ), - expectedResultForCount = "<< { 'the_count': 10, 'the_agg': 5 } >>", - expectedResultForSum = "<< { 'the_count': 10, 'the_agg': 15 } >>", - expectedResultForMin = "<< { 'the_count': 10, 'the_agg': 1 } >>", - expectedResultForMax = "<< { 'the_count': 10, 'the_agg': 5 } >>", - expectedResultForAvg = "<< { 'the_count': 10, 'the_agg': 3 } >>" - ) + - createAggregateTestCasesFromSqlTemplates( - groupName = "null and missing aggregate arguments with GROUP BY", - sqlTemplates = listOf( - // Templates below which reference `price_missings` and `price_mixed` will only work with UndefinedVariableBehavior.MISSING - SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}( price_nulls) AS the_agg FROM products_sparse AS p GROUP BY categoryId"), - - SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}( price_missings) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}( price_mixed) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - - SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}(p.price_nulls) AS the_agg FROM products_sparse AS p GROUP BY categoryId"), - SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}(p.price_missings) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}(p.price_mixed) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - - SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}( price_nulls) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId"), - SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}( price_missings) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}( price_mixed) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - - SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}(p.price_nulls) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId"), - SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}(p.price_missings) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), - SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}(p.price_mixed) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing) - ), - expectedResultForCount = """<< + ) + + createAggregateTestCasesFromSqlTemplates( + groupName = "null and missing aggregate arguments", + sqlTemplates = listOf( + SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}( price_nulls) AS the_agg FROM products_sparse"), + SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}( price_missings) AS the_agg FROM products_sparse AS p", CompOptions.onlyUndefinedVariableBehaviorMissing), + SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}( price_mixed) AS the_agg FROM 
products_sparse AS p", CompOptions.onlyUndefinedVariableBehaviorMissing), + + SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}(p.price_nulls) AS the_agg FROM products_sparse AS p"), + SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}(p.price_missings) AS the_agg FROM products_sparse AS p"), + SqlTemplate("SELECT COUNT(1) AS the_count, {{agg}}(p.price_mixed) AS the_agg FROM products_sparse AS p") + ), + expectedResultForCount = "<< { 'the_count': 10, 'the_agg': 5 } >>", + expectedResultForSum = "<< { 'the_count': 10, 'the_agg': 15 } >>", + expectedResultForMin = "<< { 'the_count': 10, 'the_agg': 1 } >>", + expectedResultForMax = "<< { 'the_count': 10, 'the_agg': 5 } >>", + expectedResultForAvg = "<< { 'the_count': 10, 'the_agg': 3 } >>" + ) + + createAggregateTestCasesFromSqlTemplates( + groupName = "null and missing aggregate arguments with GROUP BY", + sqlTemplates = listOf( + // Templates below which reference `price_missings` and `price_mixed` will only work with UndefinedVariableBehavior.MISSING + SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}( price_nulls) AS the_agg FROM products_sparse AS p GROUP BY categoryId"), + + SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}( price_missings) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}( price_mixed) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + + SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}(p.price_nulls) AS the_agg FROM products_sparse AS p GROUP BY categoryId"), + SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}(p.price_missings) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + SqlTemplate("SELECT categoryId, COUNT(1) AS the_count, {{agg}}(p.price_mixed) AS the_agg FROM products_sparse AS p GROUP BY categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + + SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}( price_nulls) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId"), + SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}( price_missings) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}( price_mixed) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + + SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}(p.price_nulls) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId"), + SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}(p.price_missings) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing), + SqlTemplate("SELECT p.categoryId, COUNT(1) AS the_count, {{agg}}(p.price_mixed) AS the_agg FROM products_sparse AS p GROUP BY p.categoryId", CompOptions.onlyUndefinedVariableBehaviorMissing) + ), + expectedResultForCount = """<< { 'categoryId': 20, 'the_count': 4, 'the_agg': 3 }, { 'categoryId': 21, 'the_count': 6, 'the_agg': 2 } >>""", - expectedResultForSum = """<< + expectedResultForSum = """<< { 'categoryId': 20, 'the_count': 4, 'the_agg': 6 }, { 'categoryId': 21, 'the_count': 6, 'the_agg': 9 } >>""", - expectedResultForMin = """<< + expectedResultForMin = """<< { 'categoryId': 20, 'the_count': 4, 
'the_agg': 1 }, { 'categoryId': 21, 'the_count': 6, 'the_agg': 4 } >>""", - expectedResultForMax = """<< + expectedResultForMax = """<< { 'categoryId': 20, 'the_count': 4, 'the_agg': 3 }, { 'categoryId': 21, 'the_count': 6, 'the_agg': 5 } >>""", - expectedResultForAvg = """<< + expectedResultForAvg = """<< { 'categoryId': 20, 'the_count': 4, 'the_agg': 2 }, { 'categoryId': 21, 'the_count': 6, 'the_agg': 4.5 } >>""" - ) + ) @Test @Parameters @@ -698,15 +796,18 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { '_4': null, '_5': null } - >>"""), + >>""" + ), EvaluatorTestCase( "Expression with multiple subqueriees containing aggregates", "CAST((SELECT COUNT(1) FROM products) AS LIST)[0]._1 / CAST((SELECT COUNT(1) FROM suppliers) AS LIST)[0]._1", - "2"), + "2" + ), EvaluatorTestCase( "Aggregates with subquery containing another aggregate", "SELECT COUNT(1) + CAST((SELECT SUM(numInStock) FROM products) AS LIST)[0]._1 as a_number FROM products", - "<<{ 'a_number': 11116 }>>"), + "<<{ 'a_number': 11116 }>>" + ), EvaluatorTestCase( "GROUP BY with JOIN", """ @@ -718,19 +819,23 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { """<< { 'supplierName': 'Umbrella', 'the_count': 3 }, { 'supplierName': 'Initech', 'the_count': 2 } - >>"""), + >>""" + ), EvaluatorTestCase( "`COUNT(*)`, should be equivalent to `COUNT(1)", "SELECT COUNT(*) AS the_count_1, COUNT(1) AS the_count_2 FROM products", - "<< { 'the_count_1': 5, 'the_count_2': 5 } >>"), + "<< { 'the_count_1': 5, 'the_count_2': 5 } >>" + ), EvaluatorTestCase( "SELECT VALUE with nested aggregates", "SELECT VALUE (SELECT SUM(outerFromSource.col1) AS the_sum FROM <<1>>) FROM simple_1_col_1_group as outerFromSource", - "<< << { 'the_sum': 1 } >>, << { 'the_sum': 1 } >> >>"), + "<< << { 'the_sum': 1 } >>, << { 'the_sum': 1 } >> >>" + ), EvaluatorTestCase( "SELECT with GROUP BY path expression having more than 1 component.", "SELECT avg(age) as avg_employee_age, manager.address.city FROM employees GROUP BY manager.address.city", - "<<{'avg_employee_age': 22, 'city': 'Chicago'}, {'avg_employee_age': 26, 'city': 'Seattle'}>>") + "<<{'avg_employee_age': 22, 'city': 'Chicago'}, {'avg_employee_age': 26, 'city': 'Seattle'}>>" + ) ) @Test @@ -740,10 +845,12 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { fun parametersForGroupByGroupAsTest() = // GROUP BY with GROUP AS (the same as above but with "GROUP AS g") createGroupByTestCases( - listOf("SELECT * FROM simple_1_col_1_group GROUP BY col1 GROUP AS g", - "SELECT col1, g FROM simple_1_col_1_group GROUP BY col1 GROUP AS g", - "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_1_group GROUP BY col1 GROUP AS g"), - """<< + listOf( + "SELECT * FROM simple_1_col_1_group GROUP BY col1 GROUP AS g", + "SELECT col1, g FROM simple_1_col_1_group GROUP BY col1 GROUP AS g", + "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_1_group GROUP BY col1 GROUP AS g" + ), + """<< { 'col1': 1, 'g': << @@ -752,13 +859,16 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + + """ + ) + - createGroupByTestCases( - listOf("SELECT * FROM simple_2_col_1_group GROUP BY col1 GROUP AS g", - "SELECT col1, g FROM simple_2_col_1_group GROUP BY col1 GROUP AS g", - "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_2_col_1_group GROUP BY col1 GROUP AS g"), - """<< + createGroupByTestCases( + listOf( + "SELECT * FROM simple_2_col_1_group GROUP BY col1 GROUP AS g", + "SELECT col1, g FROM simple_2_col_1_group GROUP BY col1 GROUP AS g", + "SELECT VALUE { 'col1': col1, 'g': g } FROM 
simple_2_col_1_group GROUP BY col1 GROUP AS g" + ), + """<< { 'col1': 1, 'g': << @@ -767,13 +877,16 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + - - createGroupByTestCases( - listOf("SELECT * FROM simple_2_col_1_group GROUP BY col2 GROUP AS g", - "SELECT col2, g FROM simple_2_col_1_group GROUP BY col2 GROUP AS g", - "SELECT VALUE { 'col2': col2, 'g': g } FROM simple_2_col_1_group GROUP BY col2 GROUP AS g"), - """<< + """ + ) + + + createGroupByTestCases( + listOf( + "SELECT * FROM simple_2_col_1_group GROUP BY col2 GROUP AS g", + "SELECT col2, g FROM simple_2_col_1_group GROUP BY col2 GROUP AS g", + "SELECT VALUE { 'col2': col2, 'g': g } FROM simple_2_col_1_group GROUP BY col2 GROUP AS g" + ), + """<< { 'col2': 10, 'g': << @@ -782,13 +895,16 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + - - createGroupByTestCases( - listOf("SELECT * FROM simple_1_col_2_groups GROUP BY col1 GROUP AS g", - "SELECT col1, g FROM simple_1_col_2_groups GROUP BY col1 GROUP AS g", - "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_2_groups GROUP BY col1 GROUP AS g"), - """<< + """ + ) + + + createGroupByTestCases( + listOf( + "SELECT * FROM simple_1_col_2_groups GROUP BY col1 GROUP AS g", + "SELECT col1, g FROM simple_1_col_2_groups GROUP BY col1 GROUP AS g", + "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_2_groups GROUP BY col1 GROUP AS g" + ), + """<< { 'col1': 1, 'g': << @@ -804,13 +920,15 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + - createGroupByTestCases( - listOf("SELECT * FROM simple_2_col_2_groups GROUP BY col1 GROUP AS g", - "SELECT col1, g FROM simple_2_col_2_groups GROUP BY col1 GROUP AS g", - "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_2_col_2_groups GROUP BY col1 GROUP AS g" - ), - """<< + """ + ) + + createGroupByTestCases( + listOf( + "SELECT * FROM simple_2_col_2_groups GROUP BY col1 GROUP AS g", + "SELECT col1, g FROM simple_2_col_2_groups GROUP BY col1 GROUP AS g", + "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_2_col_2_groups GROUP BY col1 GROUP AS g" + ), + """<< { 'col1': 1, 'g': << @@ -826,13 +944,16 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + - - createGroupByTestCases( - listOf("SELECT * FROM simple_2_col_2_groups GROUP BY col2 GROUP AS g", - "SELECT col2, g FROM simple_2_col_2_groups GROUP BY col2 GROUP AS g", - "SELECT VALUE { 'col2': col2, 'g': g } FROM simple_2_col_2_groups GROUP BY col2 GROUP AS g"), - """<< + """ + ) + + + createGroupByTestCases( + listOf( + "SELECT * FROM simple_2_col_2_groups GROUP BY col2 GROUP AS g", + "SELECT col2, g FROM simple_2_col_2_groups GROUP BY col2 GROUP AS g", + "SELECT VALUE { 'col2': col2, 'g': g } FROM simple_2_col_2_groups GROUP BY col2 GROUP AS g" + ), + """<< { 'col2': 10, 'g': << @@ -848,14 +969,17 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + - - // GROUP BY with GROUP AS and a JOIN - createGroupByTestCases( - listOf("SELECT * FROM simple_1_col_1_group, join_me GROUP BY col1 GROUP AS g", - "SELECT col1, g FROM simple_1_col_1_group, join_me GROUP BY col1 GROUP AS g", - "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_1_group, join_me GROUP BY col1 GROUP AS g"), - """<< + """ + ) + + + // GROUP BY with GROUP AS and a JOIN + createGroupByTestCases( + listOf( + "SELECT * FROM simple_1_col_1_group, join_me GROUP BY col1 GROUP AS g", + "SELECT col1, g FROM simple_1_col_1_group, join_me GROUP BY col1 GROUP AS g", + "SELECT VALUE { 'col1': col1, 'g': g } FROM 
simple_1_col_1_group, join_me GROUP BY col1 GROUP AS g" + ), + """<< { 'col1': 1, 'g': @@ -867,12 +991,15 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) + - createGroupByTestCases( - listOf("SELECT * FROM simple_1_col_1_group, different_types_per_row GROUP BY col1 GROUP AS g", - "SELECT col1, g FROM simple_1_col_1_group, different_types_per_row GROUP BY col1 GROUP AS g", - "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_1_group, different_types_per_row GROUP BY col1 GROUP AS g"), - """<< + """ + ) + + createGroupByTestCases( + listOf( + "SELECT * FROM simple_1_col_1_group, different_types_per_row GROUP BY col1 GROUP AS g", + "SELECT col1, g FROM simple_1_col_1_group, different_types_per_row GROUP BY col1 GROUP AS g", + "SELECT VALUE { 'col1': col1, 'g': g } FROM simple_1_col_1_group, different_types_per_row GROUP BY col1 GROUP AS g" + ), + """<< { 'col1': 1, 'g': @@ -886,8 +1013,8 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { >> } >> - """) - + """ + ) @Test @Parameters @@ -907,8 +1034,8 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { """, "<< { 'categoryId': 1, 'from_widgets_b': <<{ 'name': 'Thingy' }>> }>>" ) + - createGroupByTestCases( - """ + createGroupByTestCases( + """ SELECT a.categoryId, ( @@ -919,8 +1046,8 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { FROM widgets_a AS a GROUP BY a.categoryId """, - "<< { 'categoryId': 1, 'from_widgets_b': <<{ 'name': 'Thingy' }>> }>>" - ) + "<< { 'categoryId': 1, 'from_widgets_b': <<{ 'name': 'Thingy' }>> }>>" + ) @Test @Parameters @@ -935,15 +1062,14 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { """, "<< { 'dup': 10 }, { 'dup': 11 } >>" ) + - createGroupByTestCases( - """ + createGroupByTestCases( + """ SELECT * FROM suppliers AS s GROUP BY s.supplierId AS dup, s.supplierName as dup """, - """<< { 'dup': 10, 'dup': 'Umbrella' }, { 'dup': 11, 'dup': 'Initech' } >>""" - ) - + """<< { 'dup': 10, 'dup': 'Umbrella' }, { 'dup': 11, 'dup': 'Initech' } >>""" + ) @Test fun cannotGroupBySelectListItemAliasTest() { @@ -951,12 +1077,13 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { "SELECT foo AS someSelectListAlias FROM <<{ 'a': 1 }>> GROUP BY someSelectListAlias", ErrorCode.EVALUATOR_BINDING_DOES_NOT_EXIST, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 64L, - Property.BINDING_NAME to "someSelectListAlias" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 64L, + Property.BINDING_NAME to "someSelectListAlias" ), null, - expectedPermissiveModeResult = "<<{}>>") + expectedPermissiveModeResult = "<<{}>>" + ) } @Test @@ -965,9 +1092,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { "SELECT MAX(@v2), @v2 FROM `[1, 2.0, 3e0, 4, 5d0]` AS v2", ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 19L, - Property.BINDING_NAME to "v2" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 19L, + Property.BINDING_NAME to "v2" ) ) } @@ -978,9 +1105,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { "SELECT * FROM << {'a': 1 } >> AS f GROUP BY f.a HAVING f.id = 1", ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 56L, - Property.BINDING_NAME to "f" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 56L, + Property.BINDING_NAME to "f" ) ) } @@ -991,9 +1118,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { "SELECT VALUE f.id FROM << {'a': 'b' } >> AS f GROUP BY f.a", 
ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 1L, - Property.COLUMN_NUMBER to 14L, - Property.BINDING_NAME to "f" + Property.LINE_NUMBER to 1L, + Property.COLUMN_NUMBER to 14L, + Property.BINDING_NAME to "f" ) ) } @@ -1008,9 +1135,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { session, ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 2L, - Property.COLUMN_NUMBER to 20L, - Property.BINDING_NAME to "O" + Property.LINE_NUMBER to 2L, + Property.COLUMN_NUMBER to 20L, + Property.BINDING_NAME to "O" ) ) } @@ -1025,9 +1152,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { session, ErrorCode.EVALUATOR_QUOTED_BINDING_DOES_NOT_EXIST, mapOf( - Property.LINE_NUMBER to 2L, - Property.COLUMN_NUMBER to 20L, - Property.BINDING_NAME to "O" + Property.LINE_NUMBER to 2L, + Property.COLUMN_NUMBER to 20L, + Property.BINDING_NAME to "O" ), expectedPermissiveModeResult = "<<{'_2': 10}>>" ) @@ -1044,9 +1171,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { session, ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 2L, - Property.COLUMN_NUMBER to 33L, - Property.BINDING_NAME to "c" + Property.LINE_NUMBER to 2L, + Property.COLUMN_NUMBER to 33L, + Property.BINDING_NAME to "c" ) ) } @@ -1063,9 +1190,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { session, ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 2L, - Property.COLUMN_NUMBER to 33L, - Property.BINDING_NAME to "o" + Property.LINE_NUMBER to 2L, + Property.COLUMN_NUMBER to 33L, + Property.BINDING_NAME to "o" ) ) } @@ -1086,9 +1213,9 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { session, ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 2L, - Property.COLUMN_NUMBER to 33L, - Property.BINDING_NAME to "o" + Property.LINE_NUMBER to 2L, + Property.COLUMN_NUMBER to 33L, + Property.BINDING_NAME to "o" ) ) } @@ -1107,10 +1234,10 @@ EvaluatingCompilerGroupByTest : EvaluatorTestBase() { session, ErrorCode.EVALUATOR_VARIABLE_NOT_INCLUDED_IN_GROUP_BY, mapOf( - Property.LINE_NUMBER to 4L, - Property.COLUMN_NUMBER to 24L, - Property.BINDING_NAME to "o" + Property.LINE_NUMBER to 4L, + Property.COLUMN_NUMBER to 24L, + Property.BINDING_NAME to "o" ) ) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerHavingTest.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerHavingTest.kt index 61b34d98f3..04a0207134 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerHavingTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerHavingTest.kt @@ -47,7 +47,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { { thingId: 19, attributeId: null }, { thingId: 20, attributeId: null }, { thingId: 21, attributeId: null }, - ]""").toSession() + ]""" + ).toSession() @Test @Parameters @@ -70,8 +71,9 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { { 'attributeId': 40, 'the_count': 4 }, { 'attributeId': 50, 'the_count': 5 }, { 'attributeId': null, 'the_count': 6 } - >>"""), - EvaluatorTestCase( + >>""" + ), + EvaluatorTestCase( "GROUP BY with HAVING and WHERE", """ SELECT attributeId, COUNT(*) as the_count @@ -84,7 +86,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { { 'attributeId': 40, 'the_count': 2 }, { 'attributeId': 50, 'the_count': 5 }, { 'attributeId': null, 'the_count': 6 } - >>"""), + >>""" + ), EvaluatorTestCase( "GROUP BY with HAVING - no 
rows", """ @@ -93,7 +96,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { GROUP BY attributeId GROUP AS g HAVING 1 = 0 """, - """<<>>"""), + """<<>>""" + ), EvaluatorTestCase( "GROUP BY with HAVING", """ @@ -105,7 +109,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { """<< { 'attributeId': 40, 'the_count': 4 }, { 'attributeId': 50, 'the_count': 5 } - >>"""), + >>""" + ), EvaluatorTestCase( "GROUP BY with HAVING and WHERE", """ @@ -118,7 +123,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { """<< { 'attributeId': 40, 'the_count': 2 }, { 'attributeId': 50, 'the_count': 5 } - >>"""), + >>""" + ), EvaluatorTestCase( "GROUP BY with HAVING that calls COUNT(*)", """ @@ -132,7 +138,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { { 'attributeId': 40, 'the_count': 4 }, { 'attributeId': 50, 'the_count': 5 }, { 'attributeId': null, 'the_count': 6 } - >>"""), + >>""" + ), EvaluatorTestCase( "GROUP BY with HAVING that calls SUM(*)", """ @@ -144,7 +151,8 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { """<< { 'attributeId': 40, 'the_count': 160 }, { 'attributeId': 50, 'the_count': 250 } - >>"""), + >>""" + ), EvaluatorTestCase( "GROUP BY with HAVING that references GROUP AS variable", """ @@ -160,8 +168,9 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { { 'attributeId': 40, 'the_count': 4 }, { 'attributeId': 50, 'the_count': 5 }, { 'attributeId': null, 'the_count': 6 } - >>""")) - + >>""" + ) + ) @Test fun havingWithoutGroupBy() { @@ -169,4 +178,4 @@ class EvaluatingCompilerHavingTest : EvaluatorTestBase() { voidEval("SELECT foo.bar FROM bat HAVING 1 = 1") } } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerInTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerInTests.kt index b534c39c70..c57fefb3cf 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerInTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerInTests.kt @@ -43,12 +43,14 @@ class EvaluatingCompilerInTests : EvaluatorTestBase() { groupName = "IN--right operand not a sequence (TypingMode.LEGACY)", sqlUnderTest = "1 IN 'so long'", expectedSql = "false", - compOptions = CompOptions.STANDARD), + compOptions = CompOptions.STANDARD + ), EvaluatorTestCase( groupName = "IN--right operand not a sequence (TypingMode.PERMISSIVE)", sqlUnderTest = "1 IN 'thanks for all the fish'", expectedSql = "MISSING", - compOptions = CompOptions.PERMISSIVE) + compOptions = CompOptions.PERMISSIVE + ) ) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerIntTest.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerIntTest.kt index e25c8a0eb9..4bdd4a56a6 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerIntTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerIntTest.kt @@ -20,7 +20,6 @@ import org.junit.Test import java.math.BigInteger import java.util.Random - /** * This class tests evaluation-time behavior for integer and integer overflows that existed *prior* to the * introduction of [StaticType]. 
The behavior described in these tests is still how the we should handle @@ -68,10 +67,10 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { } @Test - fun bigInt() = assertThrows("$bigInt", "Int overflow or underflow at compile time", NodeMetadata(1,1)) + fun bigInt() = assertThrows("$bigInt", "Int overflow or underflow at compile time", NodeMetadata(1, 1)) @Test - fun negativeBigInt() = assertThrows("$negativeBigInt", "Int overflow or underflow at compile time", NodeMetadata(1,2)) + fun negativeBigInt() = assertThrows("$negativeBigInt", "Int overflow or underflow at compile time", NodeMetadata(1, 2)) @Test @Parameters @@ -95,7 +94,7 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { } @Test - fun plusOverflow() = assertThrows("$closeToMaxLong + $closeToMaxLong", "Int overflow or underflow", NodeMetadata(1,21), "MISSING") + fun plusOverflow() = assertThrows("$closeToMaxLong + $closeToMaxLong", "Int overflow or underflow", NodeMetadata(1, 21), "MISSING") @Test @Parameters @@ -119,7 +118,7 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { } @Test - fun minusUnderflow() = assertThrows("$closeToMinLong - $closeToMaxLong", "Int overflow or underflow", NodeMetadata(1,22), "MISSING") + fun minusUnderflow() = assertThrows("$closeToMinLong - $closeToMaxLong", "Int overflow or underflow", NodeMetadata(1, 22), "MISSING") @Test @Parameters @@ -132,8 +131,8 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { val parameters = mutableListOf>() (1..40).map { i -> - var left = RANDOM.nextInt(1_000).toLong() - if(i % 2 == 0) left = -left + var left = RANDOM.nextInt(1_000).toLong() + if (i % 2 == 0) left = -left val right = RANDOM.nextInt(1_000).toLong() @@ -146,10 +145,10 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { } @Test - fun timesOverflow() = assertThrows("$closeToMaxLong * 2", "Int overflow or underflow", NodeMetadata(1,21), "MISSING") + fun timesOverflow() = assertThrows("$closeToMaxLong * 2", "Int overflow or underflow", NodeMetadata(1, 21), "MISSING") @Test - fun timesUnderflow() = assertThrows("${Long.MIN_VALUE} * -1", "Int overflow or underflow", NodeMetadata(1,22), "MISSING") + fun timesUnderflow() = assertThrows("${Long.MIN_VALUE} * -1", "Int overflow or underflow", NodeMetadata(1, 22), "MISSING") @Test @Parameters @@ -162,8 +161,8 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { val parameters = mutableListOf>() (1..40).map { i -> - var left = RANDOM.nextInt(1_000).toLong() - if(i % 2 == 0) left = -left + var left = RANDOM.nextInt(1_000).toLong() + if (i % 2 == 0) left = -left val right = RANDOM.nextInt(1_000).toLong() + 1 // to avoid being 0 @@ -176,7 +175,7 @@ class EvaluatingCompilerIntTest : EvaluatorTestBase() { } @Test - fun divisionUnderflow() = assertThrows("${Long.MIN_VALUE} / -1", "Int overflow or underflow", NodeMetadata(1,22), "MISSING") + fun divisionUnderflow() = assertThrows("${Long.MIN_VALUE} / -1", "Int overflow or underflow", NodeMetadata(1, 22), "MISSING") @Test fun castBigInt() = assertThrows("cast('$bigInt' as int)", "Int overflow or underflow", NodeMetadata(1, 1), "MISSING") diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerIsTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerIsTests.kt index dc38d88d68..936b4b88a6 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerIsTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerIsTests.kt @@ -84,7 +84,6 @@ private fun isIntDecimalTypeTestCase( EvaluatorTestCase(sql, expectedHonorParamsResult, 
CompOptions.TYPED_OP_BEHAVIOR_HONOR_PARAMS) ) - /** Tests for `IS` operator. */ class EvaluatingCompilerIsTests : EvaluatorTestBase() { @@ -150,16 +149,19 @@ class EvaluatingCompilerIsTests : EvaluatorTestBase() { isIntDecimalTypeTestCase( "1 IS $typeName", expectedLegacyResult = "TRUE", - expectedHonorParamsResult = "TRUE"), + expectedHonorParamsResult = "TRUE" + ), isIntDecimalTypeTestCase( "-1 IS $typeName", expectedLegacyResult = "TRUE", - expectedHonorParamsResult = "TRUE"), + expectedHonorParamsResult = "TRUE" + ), isIntDecimalTypeTestCase( "$minValue IS $typeName", expectedLegacyResult = "TRUE", - expectedHonorParamsResult = "TRUE"), + expectedHonorParamsResult = "TRUE" + ), isIntDecimalTypeTestCase( "${minValue - 1} IS $typeName", expectedLegacyResult = "TRUE", @@ -169,7 +171,8 @@ class EvaluatingCompilerIsTests : EvaluatorTestBase() { isIntDecimalTypeTestCase( "$maxValue IS $typeName", expectedLegacyResult = "TRUE", - expectedHonorParamsResult = "TRUE"), + expectedHonorParamsResult = "TRUE" + ), isIntDecimalTypeTestCase( "${maxValue + 1} IS $typeName", @@ -245,7 +248,8 @@ class EvaluatingCompilerIsTests : EvaluatorTestBase() { "話家身圧", "💋💋💋💋", "a💩😸💋", - "\u00A2\u0039\uD55C\uD800\uDF48") + "\u00A2\u0039\uD55C\uD800\uDF48" + ) return listOf( isUnicodeStringTestCase( @@ -280,7 +284,8 @@ class EvaluatingCompilerIsTests : EvaluatorTestBase() { expectedResult = "TRUE", expectedIsCharHonorParamsSql = "FALSE", expectedIsVarcharHonorParamsSql = "TRUE" - ), isUnicodeStringTestCase( + ), + isUnicodeStringTestCase( strings = listOf("a", "💩"), sqlTemplate = "'' IS {TYPE}(1)", expectedResult = "TRUE" @@ -438,6 +443,3 @@ class EvaluatingCompilerIsTests : EvaluatorTestBase() { ).flatten() } } - - - diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerLimitTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerLimitTests.kt index 28b083bc5e..c701dea3bf 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerLimitTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerLimitTests.kt @@ -54,14 +54,16 @@ class EvaluatingCompilerLimitTests : EvaluatorTestBase() { checkInputThrowingEvaluationException( """ select * from <<1>> limit -1 """, ErrorCode.EVALUATOR_NEGATIVE_LIMIT, - sourceLocationProperties(1, 29)) + sourceLocationProperties(1, 29) + ) @Test fun `non-integer value should throw exception`() = checkInputThrowingEvaluationException( """ select * from <<1>> limit 'this won''t work' """, ErrorCode.EVALUATOR_NON_INT_LIMIT_VALUE, - sourceLocationProperties(1, 28) + mapOf(Property.ACTUAL_TYPE to "STRING")) + sourceLocationProperties(1, 28) + mapOf(Property.ACTUAL_TYPE to "STRING") + ) @Test fun `LIMIT applied after GROUP BY`() = @@ -69,4 +71,4 @@ class EvaluatingCompilerLimitTests : EvaluatorTestBase() { "SELECT g FROM `[{foo: 1, bar: 10}, {foo: 1, bar: 11}]` AS f GROUP BY f.foo GROUP AS g LIMIT 1", """[ { 'g': [ { 'f': { 'foo': 1, 'bar': 10 } }, { 'f': { 'foo': 1, 'bar': 11 } } ] } ]""" ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryIntOverflowTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryIntOverflowTests.kt index a0701d2da4..0455c5dbc5 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryIntOverflowTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryIntOverflowTests.kt @@ -35,16 +35,16 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { object : Bindings { override fun get(bindingName: BindingName): StaticType? 
= globals.firstOrNull { bindingName.isEquivalentTo(it.name) }?.type - // this is a unit test so we don't care, but we don't handle the case - // of multiple ambiguous matches here. + // this is a unit test so we don't care, but we don't handle the case + // of multiple ambiguous matches here. } val valueBindings get() = object : Bindings { override fun get(bindingName: BindingName): ExprValue? = globals.firstOrNull { bindingName.isEquivalentTo(it.name) }?.value - // this is a unit test so we don't care, but we don't handle the case - // of multiple ambiguous matches here. + // this is a unit test so we don't care, but we don't handle the case + // of multiple ambiguous matches here. } } @@ -94,7 +94,8 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { prefix = "int2", type = IntType(IntType.IntRangeConstraint.SHORT), minValue = Short.MIN_VALUE.toLong(), - maxValue = Short.MAX_VALUE.toLong()), + maxValue = Short.MAX_VALUE.toLong() + ), createVariablesForInt( prefix = "int4", type = IntType(IntType.IntRangeConstraint.INT4), @@ -118,7 +119,8 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { prefix = "int2_4", type = StaticType.unionOf( IntType(IntType.IntRangeConstraint.SHORT), - IntType(IntType.IntRangeConstraint.INT4)), + IntType(IntType.IntRangeConstraint.INT4) + ), minValue = Int.MIN_VALUE.toLong(), maxValue = Int.MAX_VALUE.toLong() ), @@ -126,7 +128,8 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { prefix = "int2_u", type = StaticType.unionOf( IntType(IntType.IntRangeConstraint.INT4), - IntType(IntType.IntRangeConstraint.UNCONSTRAINED)), + IntType(IntType.IntRangeConstraint.UNCONSTRAINED) + ), minValue = Long.MIN_VALUE, maxValue = Long.MAX_VALUE ), @@ -147,14 +150,17 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { Variable( name = "int2_or_string_string", type = StaticType.unionOf(StaticType.INT2, StaticType.STRING), - value = valueFactory.newString("foo")), + value = valueFactory.newString("foo") + ), // This variable has the type of `any_of(int2, string) and has a value that is a integer Variable( name = "int2_or_string_int", type = StaticType.unionOf(StaticType.INT2, StaticType.STRING), - value = valueFactory.newInt(1)) + value = valueFactory.newInt(1) + ) ) - ).flatten()) + ).flatten() + ) @ParameterizedTest @ArgumentsSource(IntOverflowTestCases::class) @@ -168,13 +174,15 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { StaticTypeVisitorTransform( ion = ion, globalBindings = defaultEnv.typeBindings, - constraints = emptySet()).transformStatement(it) + constraints = emptySet() + ).transformStatement(it) }.let { astStatement -> // [StaticTypeInferenceVisitorTransform] currently requires that [StaticTypeVisitorTransform] is run first. 
StaticTypeInferenceVisitorTransform( globalBindings = defaultEnv.typeBindings, customFunctionSignatures = emptyList(), - customTypedOpParameters = mapOf()).transformStatement(astStatement) + customTypedOpParameters = mapOf() + ).transformStatement(astStatement) } val expression = compiler.compile(transformedAst) @@ -239,7 +247,7 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { // Unary negation TestCase("-${prefix}_max", "${prefix}_minPlus1"), // https://github.com/partiql/partiql-lang-kotlin/issues/513 - //TestCase("-${prefix}_min", "MISSING"), + // TestCase("-${prefix}_min", "MISSING"), TestCase("-${prefix}_1", "${prefix}_neg1"), TestCase("-${prefix}_neg1", "${prefix}_1") ) @@ -266,6 +274,4 @@ class EvaluatingCompilerNAryIntOverflowTests : EvaluatorTestBase() { ).flatten() } } - } - diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryTests.kt index c630814155..2a39917771 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerNAryTests.kt @@ -29,7 +29,7 @@ import org.partiql.lang.domains.PartiqlAst * Currently, the parser does not ever instantiate these with an arity > 2 so this is the only way to test this. */ @RunWith(JUnitParamsRunner::class) -class EvaluatingCompilerNAryTests: EvaluatorTestBase() { +class EvaluatingCompilerNAryTests : EvaluatorTestBase() { private val session = EvaluationSession.standard() private fun Boolean?.toIonValue(): IonValue = @@ -66,50 +66,49 @@ class EvaluatingCompilerNAryTests: EvaluatorTestBase() { */ data class ArithmeticTestCase(val op: ArithmeticOp, val arg1: Long?, val arg2: Long?, val arg3: Long?, val expectedResult: Long?) - fun parametersForTernaryArithmeticTest() = listOf( - //Null propagation for ADD + // Null propagation for ADD ArithmeticTestCase(ArithmeticOp.Plus, null, 2, 3, null), ArithmeticTestCase(ArithmeticOp.Plus, 1, null, 3, null), ArithmeticTestCase(ArithmeticOp.Plus, 1, 2, null, null), - //ADD is commutative + // ADD is commutative ArithmeticTestCase(ArithmeticOp.Plus, 1, 2, 3, 6), ArithmeticTestCase(ArithmeticOp.Plus, 3, 1, 2, 6), - //Null propagation for SUB + // Null propagation for SUB ArithmeticTestCase(ArithmeticOp.Minus, null, 1, 2, null), ArithmeticTestCase(ArithmeticOp.Minus, 10, null, 2, null), ArithmeticTestCase(ArithmeticOp.Minus, 10, 1, null, null), - //SUB is noncommutative + // SUB is noncommutative ArithmeticTestCase(ArithmeticOp.Minus, 10, 1, 2, 7), ArithmeticTestCase(ArithmeticOp.Minus, 1, 2, 10, -11), - //Null propagation for MUL + // Null propagation for MUL ArithmeticTestCase(ArithmeticOp.Times, null, 2, 3, null), ArithmeticTestCase(ArithmeticOp.Times, 10, null, 3, null), ArithmeticTestCase(ArithmeticOp.Times, 10, 2, null, null), - //MUL is commutative + // MUL is commutative ArithmeticTestCase(ArithmeticOp.Times, 10, 2, 3, 60), ArithmeticTestCase(ArithmeticOp.Times, 2, 3, 10, 60), - //Null propagation for DIV + // Null propagation for DIV ArithmeticTestCase(ArithmeticOp.Divide, null, 2, 3, null), ArithmeticTestCase(ArithmeticOp.Divide, 10, null, 3, null), ArithmeticTestCase(ArithmeticOp.Divide, 10, 2, null, null), - //DIV is noncommutative + // DIV is noncommutative ArithmeticTestCase(ArithmeticOp.Divide, 60, 2, 3, 10), ArithmeticTestCase(ArithmeticOp.Divide, 2, 3, 10, 0), - //Null propagation for MOD + // Null propagation for MOD ArithmeticTestCase(ArithmeticOp.Modulo, null, 2, 3, null), ArithmeticTestCase(ArithmeticOp.Modulo, 10, null, 
3, null), ArithmeticTestCase(ArithmeticOp.Modulo, 10, 2, null, null), - //MOD is noncommutative + // MOD is noncommutative ArithmeticTestCase(ArithmeticOp.Modulo, 19, 5, 3, 1), ArithmeticTestCase(ArithmeticOp.Modulo, 5, 3, 19, 2) ) @@ -142,14 +141,14 @@ class EvaluatingCompilerNAryTests: EvaluatorTestBase() { private fun assertEvalStatement( astExpr: PartiqlAst.Statement, - expectedExprValue: ExprValue) { + expectedExprValue: ExprValue + ) { val pipeline = CompilerPipeline.standard(ion) val expr = pipeline.compile(astExpr) val result = expr.eval(session) assertEquals(expectedExprValue.ionValue, result.ionValue) } - /** * A test case for comparison operators. * @@ -188,18 +187,18 @@ class EvaluatingCompilerNAryTests: EvaluatorTestBase() { possibleFuncs.forEach { func -> for (arity in 2..4) { val argumentPermutationCount = possibleArgumentValues.size.pow(arity) - for(i in 0..argumentPermutationCount) { + for (i in 0..argumentPermutationCount) { val baseN = i.toStringZeroPadded(arity, possibleArgumentValues.size) val args = baseN.map { possibleArgumentValues[it] }.toList() - //determine the expected value + // determine the expected value var current = args.first() val rest = args.drop(1) var expected: Boolean? = true - loop@for(it in rest) { + loop@for (it in rest) { when (func.block(current, it)) { - null -> { + null -> { expected = null break@loop } @@ -240,7 +239,6 @@ class EvaluatingCompilerNAryTests: EvaluatorTestBase() { data class LogicalOperatorsTestCase(val op: LogicalOp, val b1: Boolean?, val b2: Boolean?, val b3: Boolean?, val expectedResult: Boolean?) - fun parametersForLogicalOperatorsTest() = listOf( // AND tests // true, false arguments @@ -272,7 +270,7 @@ class EvaluatingCompilerNAryTests: EvaluatorTestBase() { LogicalOperatorsTestCase(LogicalOp.Or, false, false, null, null), LogicalOperatorsTestCase(LogicalOp.Or, false, null, false, null), LogicalOperatorsTestCase(LogicalOp.Or, null, false, false, null), - + // true, false, null arguments LogicalOperatorsTestCase(LogicalOp.Or, true, false, null, true), LogicalOperatorsTestCase(LogicalOp.Or, true, null, false, true), @@ -297,8 +295,8 @@ class EvaluatingCompilerNAryTests: EvaluatorTestBase() { ) } - val expectedExprValue = tc.expectedResult?.let { valueFactory.newBoolean(it)} ?: valueFactory.nullValue + val expectedExprValue = tc.expectedResult?.let { valueFactory.newBoolean(it) } ?: valueFactory.nullValue assertEvalStatement(query, expectedExprValue) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerOffsetTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerOffsetTests.kt index 4b4f9f5e4b..8291e5f126 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerOffsetTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerOffsetTests.kt @@ -7,10 +7,10 @@ import org.partiql.lang.errors.Property import org.partiql.lang.util.ArgumentsProviderBase import org.partiql.lang.util.to -class EvaluatingCompilerOffsetTests: EvaluatorTestBase() { +class EvaluatingCompilerOffsetTests : EvaluatorTestBase() { private val session = mapOf("foo" to "[ { 'a': 1 }, { 'a': 2 }, { 'a': 3 }, { 'a': 4 }, { 'a': 5 } ]").toSession() - class ArgsProviderValid: ArgumentsProviderBase() { + class ArgsProviderValid : ArgumentsProviderBase() { override fun getParameters(): List = listOf( // OFFSET 0 should not affect results EvaluatorTestCase( @@ -158,4 +158,4 @@ class EvaluatingCompilerOffsetTests: EvaluatorTestBase() { @ParameterizedTest @ArgumentsSource(ArgsProviderError::class) fun 
errorTests(tc: EvaluatorErrorTestCase) = checkInputThrowingEvaluationException(tc, session) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt index e2ac453e4a..4a0a499325 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt @@ -14,10 +14,14 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { sequenceOf( createExprValue("""{ name: "fido" }""", 100, "addr0"), createExprValue("""{ name: "bella" }""", 101, "addr1"), - createExprValue("""{ name: "max" }""", 102, "addr2")))))) + createExprValue("""{ name: "max" }""", 102, "addr2") + ) + ) + ) + ) + ) } - class AddressedExprValue( private val innerExprValue: ExprValue, override val name: ExprValue, @@ -33,7 +37,8 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { AddressedExprValue( IonExprValue(valueFactory, ion.singleValue(ionText)), valueFactory.newInt(index), - valueFactory.newString(address)) + valueFactory.newString(address) + ) @Test @Parameters @@ -49,7 +54,8 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { { 'name': 'fido', 'idx': 100 }, { 'name': 'bella', 'idx': 101 }, { 'name': 'max', 'idx': 102 } - >>"""), + >>""" + ), // SELECT * with BY projects the BY binding, EvaluatorTestCase( query = "SELECT * FROM dogs BY addr", @@ -57,7 +63,8 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { { 'name': 'fido', 'addr': 'addr0' }, { 'name': 'bella', 'addr': 'addr1' }, { 'name': 'max', 'addr': 'addr2' } - >>"""), + >>""" + ), // SELECT * with both AT and BY projects both, EvaluatorTestCase( query = "SELECT * FROM dogs AT idx BY addr", @@ -65,18 +72,18 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { { 'name': 'fido', 'addr': 'addr0', 'idx': 100 }, { 'name': 'bella', 'addr': 'addr1', 'idx': 101 }, { 'name': 'max', 'addr': 'addr2', 'idx': 102 } - >>""") + >>""" + ) ) @Test - fun `select * over table with mixed types` () { + fun `select * over table with mixed types`() { runTestCaseInLegacyAndPermissiveModes( EvaluatorTestCase( query = "select f.* from << { 'bar': 1 }, 10, << 11, 12 >> >> as f", - expectedSql = """<< { 'bar': 1 } ,{ '_1': 10 }, { '_1': <<11, 12>> } >>"""), - session = EvaluationSession.standard()) + expectedSql = """<< { 'bar': 1 } ,{ '_1': 10 }, { '_1': <<11, 12>> } >>""" + ), + session = EvaluationSession.standard() + ) } - } - - diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerUnknownValuesTest.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerUnknownValuesTest.kt index 29b70db198..f1f59e7393 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerUnknownValuesTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerUnknownValuesTest.kt @@ -23,7 +23,6 @@ import org.partiql.lang.types.UnknownArguments import org.partiql.lang.util.ArgumentsProviderBase import org.partiql.lang.util.crossMap - /** Test cases for PartiQL unknown values `MISSING` and `NULL`, including their propagation. */ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { @@ -43,13 +42,15 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { returnType = StaticType.INT8, // NOTE: we do not test UnknownArguments.PASS_THRU in this test class // (this path is covered by [CoalesceEvaluationTest]). 
- unknownArguments = UnknownArguments.PROPAGATE) + unknownArguments = UnknownArguments.PROPAGATE + ) override fun callWithRequired(env: Environment, required: List): ExprValue = valueFactory.newInt(required.map { it.numberValue().toLong() }.sum()) } ) - }) + } + ) /** Generates a few hundred test cases for most NAry operators as they relate to propagation of unknown values. */ class NAryUnknownPropagationCases : ArgumentsProviderBase() { @@ -120,12 +121,15 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { // in [Typing] mode, missing values are propagated as // null. Swapping this here means we don't need to specify a legacy mode value separately. expectedSql = expectedResult.replace("missing", "null"), - compOptions = CompOptions.STANDARD), + compOptions = CompOptions.STANDARD + ), EvaluatorTestCase( groupName = "$testCaseGroup : PERMISSIVE", sqlUnderTest = sqlUnderTest, expectedSql = expectedResult, - compOptions = CompOptions.PERMISSIVE)) + compOptions = CompOptions.PERMISSIVE + ) + ) } private val nullResult = "<< { 'result': null } >>" @@ -278,7 +282,8 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { testCaseGroup = "string concatenation", expression = "i.x || i.y", input = input, - expectedResult = "<< { 'result': $expectedResult } >>") + expectedResult = "<< { 'result': $expectedResult } >>" + ) return listOf( testCases("""{'x': 'a', 'y': 'b'}""", "'ab'"), @@ -503,11 +508,8 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { return cases } - } // end NAryUnknownPropagationCases - - private val nullSample = mapOf( "nullSample" to """ [ @@ -515,7 +517,8 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { {val: "B", control: false, n: null}, {val: "C", control: null, n: 3}, ] - """).toSession() + """ + ).toSession() private val missingSample = mapOf( "missingSample" to """ @@ -524,9 +527,8 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { {val: "B", control: false, n: 2}, {val: "C" ,}, ] - """).toSession() - - + """ + ).toSession() private val missingAndNullSample = mapOf( "missingAndNullSample" to """ @@ -536,7 +538,8 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { {val: "C", int:3}, {val: "D", control: null, n:5}, ] - """).toSession() + """ + ).toSession() private val boolsWithUnknowns = mapOf( "boolsWithUnknowns" to """ @@ -558,110 +561,132 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { {y: null}, {} ] - """).toSession() + """ + ).toSession() @Test fun andShortCircuits() = assertEvalExprValue( "SELECT s.x FROM [{'x': '1.1'},{'x': '2'},{'x': '3'},{'x': '4'},{'x': '5'}] as s WHERE FALSE AND CAST(s.x as INT)", "<<>>", - boolsWithUnknowns) + boolsWithUnknowns + ) @Test fun andWithNullDoesNotShortCircuits() = assertThrows( "SELECT s.x FROM [{'x': '1.1'},{'x': '2'},{'x': '3'},{'x': '4'},{'x': '5'}] as s WHERE NULL AND CAST(s.x as INT)", "can't convert string value to INT", NodeMetadata(1, 96), - "<<>>") + "<<>>" + ) @Test fun andWithMissingDoesNotShortCircuits() = assertThrows( "SELECT s.x FROM [{'x': '1.1'},{'x': '2'},{'x': '3'},{'x': '4'},{'x': '5'}] as s WHERE MISSING AND CAST(s.x as INT)", "can't convert string value to INT", NodeMetadata(1, 99), - "<<>>") + "<<>>" + ) - ////////////////////////////////////////////////// + // //////////////////////////////////////////////// // Where-clause - ////////////////////////////////////////////////// + // //////////////////////////////////////////////// @Test fun whereClauseExprEvalsToNull() = 
assertEvalExprValue( "SELECT VALUE D.val from nullSample as D WHERE D.control", "<<'A'>>", - nullSample) + nullSample + ) @Test fun whereClauseExprEvalsToMissing() = assertEvalExprValue( "SELECT VALUE D.val from missingSample as D WHERE D.control", "<<'A'>>", - missingSample) + missingSample + ) @Test fun whereClauseExprEvalsToNullAndMissing() = assertEvalExprValue( "SELECT VALUE D.val from missingAndNullSample as D WHERE D.control", "<<'A'>>", - missingAndNullSample) + missingAndNullSample + ) - ////////////////////////////////////////////////// + // //////////////////////////////////////////////// // Aggregates - ////////////////////////////////////////////////// + // //////////////////////////////////////////////// @Test fun aggregateSumWithNull() = assertEval("SELECT sum(x.n) from nullSample as x", "[{_1: 4}]", nullSample) @Test - fun aggregateSumWithMissing() = assertEval("SELECT sum(x.n) from missingSample as x", + fun aggregateSumWithMissing() = assertEval( + "SELECT sum(x.n) from missingSample as x", "[{_1: 3}]", - missingSample) + missingSample + ) @Test - fun aggregateSumWithMissingAndNull() = assertEval("SELECT sum(x.n) from missingAndNullSample as x", + fun aggregateSumWithMissingAndNull() = assertEval( + "SELECT sum(x.n) from missingAndNullSample as x", "[{_1: 9}]", - missingAndNullSample) - + missingAndNullSample + ) @Test fun aggregateMinWithNull() = assertEval("SELECT min(x.n) from nullSample as x", "[{_1: 1}]", nullSample) @Test - fun aggregateMinWithMissing() = assertEval("SELECT min(x.n) from missingSample as x", + fun aggregateMinWithMissing() = assertEval( + "SELECT min(x.n) from missingSample as x", "[{_1: 1}]", - missingSample) + missingSample + ) @Test - fun aggregateMinWithMissingAndNull() = assertEval("SELECT min(x.n) from missingAndNullSample as x", + fun aggregateMinWithMissingAndNull() = assertEval( + "SELECT min(x.n) from missingAndNullSample as x", "[{_1: 2}]", - missingAndNullSample) - + missingAndNullSample + ) @Test fun aggregateAvgWithNull() = assertEval("SELECT avg(x.n) from nullSample as x", "[{_1: 2.}]", nullSample) @Test - fun aggregateAvgWithMissing() = assertEval("SELECT avg(x.n) from missingSample as x", + fun aggregateAvgWithMissing() = assertEval( + "SELECT avg(x.n) from missingSample as x", "[{_1: 1.5}]", - missingSample) + missingSample + ) @Test - fun aggregateAvgWithMissingAndNull() = assertEval("SELECT avg(x.n) from missingAndNullSample as x", + fun aggregateAvgWithMissingAndNull() = assertEval( + "SELECT avg(x.n) from missingAndNullSample as x", "[{_1: 3.}]", - missingAndNullSample) - + missingAndNullSample + ) @Test - fun aggregateCountWithNull() = assertEval("SELECT count(x.n) from nullSample as x", + fun aggregateCountWithNull() = assertEval( + "SELECT count(x.n) from nullSample as x", "[{_1: 2}]", - nullSample) + nullSample + ) @Test - fun aggregateCountWithMissing() = assertEval("SELECT count(x.n) from missingSample as x", + fun aggregateCountWithMissing() = assertEval( + "SELECT count(x.n) from missingSample as x", "[{_1: 2}]", - missingSample) + missingSample + ) @Test - fun aggregateCountWithMissingAndNull() = assertEval("SELECT count(x.n) from missingAndNullSample as x", + fun aggregateCountWithMissingAndNull() = assertEval( + "SELECT count(x.n) from missingAndNullSample as x", "[{_1: 3}]", - missingAndNullSample) + missingAndNullSample + ) @Test fun countEmpty() = assertEval("SELECT count(*) from `[]`", "[{_1: 0}]") @@ -669,7 +694,6 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { @Test fun countEmptyTuple() = 
assertEval("SELECT count(*) from `[{}]`", "[{_1: 1}]") - @Test fun sumEmpty() = assertEval("SELECT sum(x.i) from `[]` as x", "[{_1: null}]") @@ -683,20 +707,28 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { fun avgEmptyTuple() = assertEval("SELECT avg(x.i) from `[{}]` as x", "[{_1: null}]") @Test - fun avgSomeEmptyTuples() = assertEval("SELECT avg(x.i) from `[{i: 1}, {}, {i:3}]` as x", - "[{_1: 2.}]") + fun avgSomeEmptyTuples() = assertEval( + "SELECT avg(x.i) from `[{i: 1}, {}, {i:3}]` as x", + "[{_1: 2.}]" + ) @Test - fun avgSomeEmptyAndNullTuples() = assertEval("SELECT avg(x.i) from `[{i: 1}, {}, {i:null}, {i:3}]` as x", - "[{_1: 2.}]") + fun avgSomeEmptyAndNullTuples() = assertEval( + "SELECT avg(x.i) from `[{i: 1}, {}, {i:null}, {i:3}]` as x", + "[{_1: 2.}]" + ) @Test - fun minSomeEmptyTuples() = assertEval("SELECT min(x.i) from `[{i: null}, {}, {i:3}]` as x", - "[{_1: 3}]") + fun minSomeEmptyTuples() = assertEval( + "SELECT min(x.i) from `[{i: null}, {}, {i:3}]` as x", + "[{_1: 3}]" + ) @Test - fun maxSomeEmptyTuples() = assertEval("SELECT max(x.i) from `[{i: null}, {}, {i:3}, {i:10}]` as x", - "[{_1: 10}]") + fun maxSomeEmptyTuples() = assertEval( + "SELECT max(x.i) from `[{i: null}, {}, {i:3}, {i:10}]` as x", + "[{_1: 10}]" + ) @Test fun minEmpty() = assertEval("SELECT min(x.i) from `[]` as x", "[{_1: null}]") @@ -710,30 +742,35 @@ class EvaluatingCompilerUnknownValuesTest : EvaluatorTestBase() { fun maxEmptyTuple() = assertEval("SELECT max(x.i) from `[{}]` as x", "[{_1: null}]") @Test - fun maxSomeEmptyTuple() = assertEval("SELECT max(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", - "[{_1: 2}]") + fun maxSomeEmptyTuple() = assertEval( + "SELECT max(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", + "[{_1: 2}]" + ) @Test - fun minSomeEmptyTuple() = assertEval("SELECT min(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", - "[{_1: 1}]") + fun minSomeEmptyTuple() = assertEval( + "SELECT min(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", + "[{_1: 1}]" + ) @Test - fun sumSomeEmptyTuple() = assertEval("SELECT sum(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", - "[{_1: 3}]") + fun sumSomeEmptyTuple() = assertEval( + "SELECT sum(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", + "[{_1: 3}]" + ) @Test - fun countSomeEmptyTuple() = assertEval("SELECT count(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", - "[{_1: 2}]") + fun countSomeEmptyTuple() = assertEval( + "SELECT count(x.i) from `[{}, {i:1}, {}, {i:2}]` as x", + "[{_1: 2}]" + ) @Test - fun countStar() = assertEval("SELECT count(*) from `[{}, {i:1}, {}, {i:2}]` as x", - "[{_1: 4}]") + fun countStar() = assertEval( + "SELECT count(*) from `[{}, {i:1}, {}, {i:2}]` as x", + "[{_1: 4}]" + ) @Test fun countLiteral() = assertEval("SELECT count(1) from `[{}, {}, {}, {}]` as x", "[{_1: 4}]") } - - - - - diff --git a/lang/test/org/partiql/lang/eval/EvaluationSessionTest.kt b/lang/test/org/partiql/lang/eval/EvaluationSessionTest.kt index 5ea65e51ca..1d0dfd7da9 100644 --- a/lang/test/org/partiql/lang/eval/EvaluationSessionTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluationSessionTest.kt @@ -52,7 +52,7 @@ class EvaluationSessionTest { @Test fun settingGlobals() { val globals = Bindings.empty() - val block: () -> EvaluationSession = { EvaluationSession.build { globals(globals) }} + val block: () -> EvaluationSession = { EvaluationSession.build { globals(globals) } } val session = block.invoke() assertEquals(globals, session.globals) @@ -62,7 +62,7 @@ class EvaluationSessionTest { @Test fun settingNow() { val now = Timestamp.forMillis(10, 0) - val session = 
EvaluationSession.build { now(now) } + val session = EvaluationSession.build { now(now) } assertEquals(Bindings.empty(), session.globals) assertEquals(now, session.now) @@ -73,9 +73,9 @@ class EvaluationSessionTest { val now = Timestamp.forMillis(10, 10) val utcNow = now.withLocalOffset(0) - val session = EvaluationSession.build { now(now) } + val session = EvaluationSession.build { now(now) } assertEquals(Bindings.empty(), session.globals) assertEquals(utcNow, session.now) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatorErrorTestCase.kt b/lang/test/org/partiql/lang/eval/EvaluatorErrorTestCase.kt index e168c1eafd..652f36a925 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatorErrorTestCase.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatorErrorTestCase.kt @@ -56,7 +56,7 @@ data class EvaluatorErrorTestCase( ) : this(null, input, errorCode, expectErrorContextValues, cause, expectedPermissiveModeResult, compOptions) /** This will show up in the IDE's test runner. */ - override fun toString() : String { + override fun toString(): String { val groupNameString = if (groupName == null) "" else "$groupName" val causeString = if (cause == null) "" else ": $cause" return "$groupNameString $sqlUnderTest : $errorCode : $expectErrorContextValues $causeString" diff --git a/lang/test/org/partiql/lang/eval/EvaluatorStaticTypeTests.kt b/lang/test/org/partiql/lang/eval/EvaluatorStaticTypeTests.kt index 85c11bc209..8ad382acc5 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatorStaticTypeTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatorStaticTypeTests.kt @@ -6,7 +6,6 @@ import org.partiql.lang.ION import org.partiql.lang.util.testdsl.IonResultTestCase import org.partiql.lang.util.testdsl.runTestCase - /** * This test class is effectively the same as [EvaluatorTests] however it: * @@ -162,8 +161,6 @@ class EvaluatorStaticTypeTests { "pivotLiteralFieldNameFrom", "pivotUnpivotWithWhereLimit", - - // STIR does not support `CompilePipeline.undefinedVariableBehavior` // (these are likely to be a permanent entries to this list since STR/STIR will probably // never support undefined variables). 
@@ -183,12 +180,14 @@ class EvaluatorStaticTypeTests { fun evaluatorStaticTypeTests() = EVALUATOR_TEST_SUITE.getAllTests( EvaluatorTests.SKIP_LIST.union(FAILING_TESTS) ).map { - it.copy(compileOptions = CompileOptions.build(it.compileOptions) { - // set permissive mode - typingMode(TypingMode.PERMISSIVE) - // enable evaluation time type checking - evaluationTimeTypeChecks(ThunkReturnTypeAssertions.ENABLED) - }) + it.copy( + compileOptions = CompileOptions.build(it.compileOptions) { + // set permissive mode + typingMode(TypingMode.PERMISSIVE) + // enable evaluation time type checking + evaluationTimeTypeChecks(ThunkReturnTypeAssertions.ENABLED) + } + ) } } @@ -199,6 +198,6 @@ class EvaluatorStaticTypeTests { valueFactory = valueFactory, db = mockDb, // Enable the static type inferencer for this - pipelineBlock = { this.globalTypeBindings(mockDb.typeBindings) }) - -} \ No newline at end of file + pipelineBlock = { this.globalTypeBindings(mockDb.typeBindings) } + ) +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatorTestBase.kt b/lang/test/org/partiql/lang/eval/EvaluatorTestBase.kt index 348961c9f1..7b38329e44 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatorTestBase.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatorTestBase.kt @@ -14,16 +14,16 @@ package org.partiql.lang.eval +import com.amazon.ion.IonType +import com.amazon.ion.IonValue +import org.partiql.lang.CUSTOM_TEST_TYPES import org.partiql.lang.CompilerPipeline +import org.partiql.lang.SqlException import org.partiql.lang.TestBase import org.partiql.lang.ast.AstDeserializerBuilder import org.partiql.lang.ast.AstSerializer import org.partiql.lang.ast.AstVersion import org.partiql.lang.ast.ExprNode -import com.amazon.ion.IonType -import com.amazon.ion.IonValue -import org.partiql.lang.CUSTOM_TEST_TYPES -import org.partiql.lang.SqlException import org.partiql.lang.ast.toAstStatement import org.partiql.lang.ast.toExprNode import org.partiql.lang.checkErrorAndErrorContext @@ -64,10 +64,12 @@ abstract class EvaluatorTestBase : TestBase() { protected fun Map.toSession() = EvaluationSession.build { globals(this@toSession.toBindings()) } - fun voidEval(source: String, - compileOptions: CompileOptions = CompileOptions.standard(), - session: EvaluationSession = EvaluationSession.standard(), - compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { }) { + fun voidEval( + source: String, + compileOptions: CompileOptions = CompileOptions.standard(), + session: EvaluationSession = EvaluationSession.standard(), + compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { } + ) { // force materialization eval(source, compileOptions, session, compilerPipelineBuilderBlock).ionValue } @@ -85,21 +87,25 @@ abstract class EvaluatorTestBase : TestBase() { * @param compilerPipelineBuilderBlock any additional configuration to the pipeline after the options are set. 
* @param block function literal with receiver used to plug in custom assertions */ - protected fun assertEval(source: String, - expected: String, - session: EvaluationSession = EvaluationSession.standard(), - compileOptions: CompileOptions = CompileOptions.standard(), - compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { }, - block: AssertExprValue.() -> Unit = { }) { + protected fun assertEval( + source: String, + expected: String, + session: EvaluationSession = EvaluationSession.standard(), + compileOptions: CompileOptions = CompileOptions.standard(), + compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { }, + block: AssertExprValue.() -> Unit = { } + ) { val expectedIon = ion.singleValue(expected) val parser = SqlParser(ion, CUSTOM_TEST_TYPES) val originalAst = parser.parseAstStatement(source) fun evalAndAssert(ast: PartiqlAst.Statement, message: String) { - AssertExprValue(eval(ast, compileOptions, session, compilerPipelineBuilderBlock), - message = "${compileOptions.typedOpBehavior} CAST in ${compileOptions.typingMode} typing mode, " + - "evaluated '$source' with evaluator ($message)").apply { assertIonValue(expectedIon) }.run(block) + AssertExprValue( + eval(ast, compileOptions, session, compilerPipelineBuilderBlock), + message = "${compileOptions.typedOpBehavior} CAST in ${compileOptions.typingMode} typing mode, " + + "evaluated '$source' with evaluator ($message)" + ).apply { assertIonValue(expectedIon) }.run(block) } // Evaluate the ast originally obtained from the parser @@ -115,9 +121,11 @@ abstract class EvaluatorTestBase : TestBase() { * @param source query source to be tested * @param session [EvaluationSession] used for evaluation */ - protected fun assertEvalIsMissing(source: String, - session: EvaluationSession = EvaluationSession.standard(), - compileOptions: CompileOptions = CompileOptions.standard()) { + protected fun assertEvalIsMissing( + source: String, + session: EvaluationSession = EvaluationSession.standard(), + compileOptions: CompileOptions = CompileOptions.standard() + ) { val parser = SqlParser(ion) val deserializer = AstDeserializerBuilder(ion).build() @@ -165,15 +173,15 @@ abstract class EvaluatorTestBase : TestBase() { assertEquals( ast, roundTrippedAst, - "PIG ast resulting from round trip to ExprNode and back should be equivalent.") + "PIG ast resulting from round trip to ExprNode and back should be equivalent." + ) } - protected fun assertExprEquals(expected: ExprValue, actual: ExprValue, message: String) { // exprEquals consider NULL and MISSING to be equivalent so we also check types here val isActuallyEquivalent = expected.type == actual.type && expected.exprEquals(actual) - if(!isActuallyEquivalent) { + if (!isActuallyEquivalent) { println("Expected ionValue: ${ConfigurableExprValueFormatter.pretty.format(expected)} ") println("Actual ionValue : ${ConfigurableExprValueFormatter.pretty.format(actual)} ") fail("$message Expected and actual ExprValue instances are not equivalent") @@ -219,10 +227,12 @@ abstract class EvaluatorTestBase : TestBase() { * @param session [EvaluationSession] used for evaluation * @param compilerPipelineBuilderBlock any additional configuration to the pipeline after the options are set. 
*/ - protected fun eval(source: String, - compileOptions: CompileOptions = CompileOptions.standard(), - session: EvaluationSession = EvaluationSession.standard(), - compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { }): ExprValue { + protected fun eval( + source: String, + compileOptions: CompileOptions = CompileOptions.standard(), + session: EvaluationSession = EvaluationSession.standard(), + compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { } + ): ExprValue { val p = SqlParser(ion, CUSTOM_TEST_TYPES) @@ -238,10 +248,12 @@ abstract class EvaluatorTestBase : TestBase() { * @param session [EvaluationSession] used for evaluation * @param compilerPipelineBuilderBlock any additional configuration to the pipeline after the options are set. */ - protected fun evalForPermissiveMode(source: String, - compileOptions: CompileOptions = CompileOptions.standard(), - session: EvaluationSession = EvaluationSession.standard(), - compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { }): ExprValue { + protected fun evalForPermissiveMode( + source: String, + compileOptions: CompileOptions = CompileOptions.standard(), + session: EvaluationSession = EvaluationSession.standard(), + compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { } + ): ExprValue { val p = SqlParser(ion) @@ -261,10 +273,12 @@ abstract class EvaluatorTestBase : TestBase() { * @param session [EvaluationSession] used for evaluation * @param compilerPipelineBuilderBlock any additional configuration to the pipeline after the options are set. */ - protected fun eval(astStatement: PartiqlAst.Statement, - compileOptions: CompileOptions = CompileOptions.standard(), - session: EvaluationSession = EvaluationSession.standard(), - compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { } ): ExprValue { + protected fun eval( + astStatement: PartiqlAst.Statement, + compileOptions: CompileOptions = CompileOptions.standard(), + session: EvaluationSession = EvaluationSession.standard(), + compilerPipelineBuilderBlock: CompilerPipeline.Builder.() -> Unit = { } + ): ExprValue { // "Sneak" in this little assertion to test that every PIG ast that passes through // this function can be round-tripped to ExprNode and back. @@ -278,13 +292,15 @@ abstract class EvaluatorTestBase : TestBase() { return pipeline.build().compile(astStatement).eval(session) } - private fun assertEvalThrows(query: String, - message: String, - metadata: NodeMetadata? = null, - internal: Boolean = false, - cause: KClass? = null, - session: EvaluationSession = EvaluationSession.standard(), - typingMode: TypingMode = TypingMode.LEGACY): EvaluationException { + private fun assertEvalThrows( + query: String, + message: String, + metadata: NodeMetadata? = null, + internal: Boolean = false, + cause: KClass? 
= null, + session: EvaluationSession = EvaluationSession.standard(), + typingMode: TypingMode = TypingMode.LEGACY + ): EvaluationException { val compileOptions = when (typingMode) { TypingMode.LEGACY -> CompileOptions.standard() @@ -294,8 +310,7 @@ abstract class EvaluatorTestBase : TestBase() { try { voidEval(query, session = session, compileOptions = compileOptions) fail("didn't throw") - } - catch (e: EvaluationException) { + } catch (e: EvaluationException) { softAssert { if (typingMode == TypingMode.LEGACY) { assertThat(e.message).`as`("error message").isEqualTo(message) @@ -304,11 +319,10 @@ abstract class EvaluatorTestBase : TestBase() { if (cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(cause.java) - if(metadata != null) { + if (metadata != null) { assertThat(e.errorContext!![Property.LINE_NUMBER]!!.longValue()).`as`("line number").isEqualTo(metadata.line) assertThat(e.errorContext!![Property.COLUMN_NUMBER]!!.longValue()).`as`("column number").isEqualTo(metadata.column) - } - else { + } else { assertThat(e.errorContext).isNull() } } @@ -320,27 +334,27 @@ abstract class EvaluatorTestBase : TestBase() { /** * Asserts that [func] throws an [SqlException] with the specified message, line and column number */ - protected fun assertThrows(message: String, - metadata: NodeMetadata? = null, - internal: Boolean = false, - cause: KClass? = null, - func: () -> Unit) { + protected fun assertThrows( + message: String, + metadata: NodeMetadata? = null, + internal: Boolean = false, + cause: KClass? = null, + func: () -> Unit + ) { try { func() fail("didn't throw") - } - catch (e: EvaluationException) { + } catch (e: EvaluationException) { softAssert { assertThat(e.message).`as`("error message").isEqualTo(message) assertThat(e.internal).isEqualTo(internal) if (cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(cause.java) - if(metadata != null) { + if (metadata != null) { assertThat(e.errorContext!![Property.LINE_NUMBER]!!.longValue()).`as`("line number").isEqualTo(metadata.line) assertThat(e.errorContext!![Property.COLUMN_NUMBER]!!.longValue()).`as`("column number").isEqualTo(metadata.column) - } - else { + } else { assertThat(e.errorContext).isNull() } } @@ -353,13 +367,15 @@ abstract class EvaluatorTestBase : TestBase() { * It also verifies the behavior of error in [TypingMode.PERMISSIVE] mode. * This should be used to ensure that the query is tested for both [TypingMode.LEGACY] and [TypingMode.PERMISSIVE] */ - protected fun assertThrows(query: String, - message: String, - metadata: NodeMetadata? = null, - expectedPermissiveModeResult: String? = null, - internal: Boolean = false, - cause: KClass? = null, - session: EvaluationSession = EvaluationSession.standard()) { + protected fun assertThrows( + query: String, + message: String, + metadata: NodeMetadata? = null, + expectedPermissiveModeResult: String? = null, + internal: Boolean = false, + cause: KClass? = null, + session: EvaluationSession = EvaluationSession.standard() + ) { val exception = assertEvalThrows(query, message, metadata, internal, cause, session = session, typingMode = TypingMode.LEGACY) @@ -382,58 +398,64 @@ abstract class EvaluatorTestBase : TestBase() { /** * Asserts that [func] throws an [SqlException], line and column number in [TypingMode.PERMISSIVE] mode */ - protected fun assertThrowsInPermissiveMode(errorCode: ErrorCode, - metadata: NodeMetadata? = null, - cause: KClass? = null, - func: () -> Unit) { + protected fun assertThrowsInPermissiveMode( + errorCode: ErrorCode, + metadata: NodeMetadata? 
= null, + cause: KClass? = null, + func: () -> Unit + ) { try { func() fail("didn't throw") - } - catch (e: SqlException) { + } catch (e: SqlException) { softAssert { - if(metadata != null) { + if (metadata != null) { assertThat(e.errorContext!![Property.LINE_NUMBER]!!.longValue()).`as`("line number").isEqualTo(metadata.line) assertThat(e.errorContext!![Property.COLUMN_NUMBER]!!.longValue()).`as`("column number").isEqualTo(metadata.column) if (cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(cause.java) } assertEquals(errorCode, e.errorCode, "Error codes should be same") - } } } - protected fun checkInputThrowingEvaluationException(input: String, - errorCode: ErrorCode? = null, - expectErrorContextValues: Map, - cause: KClass? = null, - expectedPermissiveModeResult: String? = null) { + protected fun checkInputThrowingEvaluationException( + input: String, + errorCode: ErrorCode? = null, + expectErrorContextValues: Map, + cause: KClass? = null, + expectedPermissiveModeResult: String? = null + ) { checkInputThrowingEvaluationException( input, EvaluationSession.standard(), errorCode, expectErrorContextValues, cause, - expectedPermissiveModeResult) + expectedPermissiveModeResult + ) } - protected fun checkInputThrowingEvaluationException(input: String, - session: EvaluationSession, - errorCode: ErrorCode? = null, - expectErrorContextValues: Map, - cause: KClass? = null, - expectedPermissiveModeResult: String? = null) { + protected fun checkInputThrowingEvaluationException( + input: String, + session: EvaluationSession, + errorCode: ErrorCode? = null, + expectErrorContextValues: Map, + cause: KClass? = null, + expectedPermissiveModeResult: String? = null + ) { softAssert { try { val result = eval(input, session = session).ionValue - fail("Expected SqlException but there was no Exception. " + - "The unexpected result was: \n${result.toPrettyString()}") - } - catch (e: SqlException) { + fail( + "Expected SqlException but there was no Exception. " + + "The unexpected result was: \n${result.toPrettyString()}" + ) + } catch (e: SqlException) { if (cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(cause.java) checkErrorAndErrorContext(errorCode, e, expectErrorContextValues) - //Error thrown in LEGACY MODE needs to be checked in PERMISSIVE MODE + // Error thrown in LEGACY MODE needs to be checked in PERMISSIVE MODE when (e.errorCode.errorBehaviorInPermissiveMode) { ErrorBehaviorInPermissiveMode.THROW_EXCEPTION -> { assertNull("An expectedPermissiveModeResult must not be specified when ErrorCode.errorBehaviorInPermissiveMode is set to ErrorBehaviorInPermissiveMode.THROW_EXCEPTION", expectedPermissiveModeResult) @@ -449,8 +471,7 @@ abstract class EvaluatorTestBase : TestBase() { assertExprEquals(expectedExprValueForPermissiveMode, originalExprValueForPermissiveMode, "(PERMISSIVE mode)") } } - } - catch (e: Exception) { + } catch (e: Exception) { fail("Expected SqlException but a different exception was thrown:\n\t $e") } } @@ -460,13 +481,14 @@ abstract class EvaluatorTestBase : TestBase() { softAssert { try { val result = eval(tc.sqlUnderTest, compileOptions = tc.compOptions.options, session = session).ionValue - fail("Expected EvaluationException but there was no Exception. " + - "The unepxected result was: \n${result.toPrettyString()}") - } - catch (e: EvaluationException) { + fail( + "Expected EvaluationException but there was no Exception. 
" + + "The unepxected result was: \n${result.toPrettyString()}" + ) + } catch (e: EvaluationException) { if (tc.cause != null) assertThat(e).hasRootCauseExactlyInstanceOf(tc.cause.java) checkErrorAndErrorContext(tc.errorCode, e, tc.expectErrorContextValues) - //Error thrown in LEGACY MODE needs to be checked in PERMISSIVE MODE + // Error thrown in LEGACY MODE needs to be checked in PERMISSIVE MODE when (e.errorCode.errorBehaviorInPermissiveMode) { ErrorBehaviorInPermissiveMode.THROW_EXCEPTION -> { assertNull("An EvaluatorErrorTestCase.expectedPermissiveModeResult must not be specified when ErrorCode.errorBehaviorInPermissiveMode is set to ErrorBehaviorInPermissiveMode.THROW_EXCEPTION", tc.expectedPermissiveModeResult) @@ -484,8 +506,7 @@ abstract class EvaluatorTestBase : TestBase() { } } } - } - catch (e: Exception) { + } catch (e: Exception) { fail("Expected EvaluationException but a different exception was thrown:\n\t $e") } } @@ -538,7 +559,8 @@ abstract class EvaluatorTestBase : TestBase() { eval( source = tc.expectedSql, compilerPipelineBuilderBlock = compilerPipelineBuilderBlock, - compileOptions = co) + compileOptions = co + ) } catch (e: Throwable) { showTestCase() e.printStackTrace() @@ -551,7 +573,8 @@ abstract class EvaluatorTestBase : TestBase() { source = tc.sqlUnderTest, compilerPipelineBuilderBlock = compilerPipelineBuilderBlock, session = session, - compileOptions = co) + compileOptions = co + ) } catch (e: Throwable) { showTestCase() e.printStackTrace() @@ -569,9 +592,8 @@ abstract class EvaluatorTestBase : TestBase() { } } - internal fun IonValue.removeAnnotations() { - when(this.type) { + when (this.type) { // Remove $partiql_missing annotation from NULL for assertions IonType.NULL -> this.removeTypeAnnotation(MISSING_ANNOTATION) IonType.DATAGRAM, @@ -594,4 +616,4 @@ internal fun IonValue.removeAnnotations() { internal fun IonValue.cloneAndRemoveAnnotations() = this.clone().apply { removeAnnotations() makeReadOnly() -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/EvaluatorTestCase.kt b/lang/test/org/partiql/lang/eval/EvaluatorTestCase.kt index 8e2dcd1664..3639865642 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatorTestCase.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatorTestCase.kt @@ -34,7 +34,8 @@ data class EvaluatorTestCase( /** * The [CompOptions] containing the [CompileOptions] to use when executing [sqlUnderTest] and [expectedSql]. */ - val compOptions: CompOptions = CompOptions.STANDARD) { + val compOptions: CompOptions = CompOptions.STANDARD +) { constructor( query: String, @@ -45,6 +46,6 @@ data class EvaluatorTestCase( /** This will show up in the IDE's test runner. 
*/ override fun toString() = when { groupName != null -> "$groupName : $sqlUnderTest : $compOptions" - else -> "$sqlUnderTest : $compOptions" + else -> "$sqlUnderTest : $compOptions" } } diff --git a/lang/test/org/partiql/lang/eval/EvaluatorTestSuite.kt b/lang/test/org/partiql/lang/eval/EvaluatorTestSuite.kt index 0d2ce3dbcb..984f4a7e18 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatorTestSuite.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatorTestSuite.kt @@ -218,48 +218,55 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { "undefinedUnqualifiedVariableWithUndefinedVariableBehaviorMissing", "undefined_variable", "$partiql_missing::null", - compileOptions = undefinedVariableMisisng) - + compileOptions = undefinedVariableMisisng + ) test( "undefinedUnqualifiedVariableIsNullExprWithUndefinedVariableBehaviorMissing", "undefined_variable IS NULL", "true", - compileOptions = undefinedVariableMisisng) + compileOptions = undefinedVariableMisisng + ) test( "undefinedUnqualifiedVariableIsMissingExprWithUndefinedVariableBehaviorMissing", "undefined_variable IS MISSING", "true", - compileOptions = undefinedVariableMisisng) + compileOptions = undefinedVariableMisisng + ) test( "undefinedUnqualifiedVariableInSelectWithUndefinedVariableBehaviorMissing", "SELECT s.a, s.undefined_variable, s.b FROM `[{a:100, b:200}]` s", "$partiql_bag::[{a:100, b:200}]", - compileOptions = undefinedVariableMisisng) + compileOptions = undefinedVariableMisisng + ) } group("path in from clause") { test( "selectFromScalarAndAtUnpivotWildCardOverScalar", "SELECT VALUE [n, v] FROM (100).* AS v AT n", - """$partiql_bag::[ ["_1", 100] ]""") + """$partiql_bag::[ ["_1", 100] ]""" + ) test( "selectFromListAndAtUnpivotWildCardOverScalar", "SELECT VALUE [n, (SELECT VALUE [i, x] FROM @v AS x AT i)] FROM [100, 200].*.*.* AS v AT n", - """$partiql_bag::[ ["_1", $partiql_bag::[[0, 100], [1, 200]]] ]""") + """$partiql_bag::[ ["_1", $partiql_bag::[[0, 100], [1, 200]]] ]""" + ) test( "selectFromBagAndAtUnpivotWildCardOverScalar", """ SELECT VALUE [n, (SELECT VALUE [i IS MISSING, i, x] FROM @v AS x AT i)] FROM <<100, 200>>.* AS v AT n """, - """$partiql_bag::[["_1",$partiql_bag::[[true,$partiql_missing::null,100],[true,$partiql_missing::null,200]]]]""") + """$partiql_bag::[["_1",$partiql_bag::[[true,$partiql_missing::null,100],[true,$partiql_missing::null,200]]]]""" + ) test( "selectPathUnpivotWildCardOverStructMultiple", "SELECT name, val FROM a.*.*.*.* AS val AT name", - """$partiql_bag::[{name: "e", val: 5}, {name: "f", val: 6}]""") + """$partiql_bag::[{name: "e", val: 5}, {name: "f", val: 6}]""" + ) test( "selectStarSingleSourceHoisted", @@ -276,8 +283,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "ordinalAccessWithNegativeIndexAndBindings", "SELECT temp[-2] FROM [[1,2,3,4]] AS temp", - "$partiql_bag::[{}]") - + "$partiql_bag::[{}]" + ) } group("various types in from clause") { @@ -293,18 +300,21 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "rangeOverBagWithAt", "SELECT VALUE [i, v] FROM <<1, 2, 3>> AS v AT i", - "$partiql_bag::[[$partiql_missing::null, 1], [$partiql_missing::null, 2], [$partiql_missing::null, 3]]") + "$partiql_bag::[[$partiql_missing::null, 1], [$partiql_missing::null, 2], [$partiql_missing::null, 3]]" + ) test( "rangeOverNestedWithAt", "SELECT VALUE [i, v] FROM (SELECT VALUE v FROM `[1, 2, 3]` AS v) AS v AT i", - "$partiql_bag::[[$partiql_missing::null, 1], [$partiql_missing::null, 2], [$partiql_missing::null, 
3]]") + "$partiql_bag::[[$partiql_missing::null, 1], [$partiql_missing::null, 2], [$partiql_missing::null, 3]]" + ) } group("select list item") { test( "explicitAliasSelectSingleSource", "SELECT id AS name FROM stores", - """$partiql_bag::[{name:"5"}, {name:"6"}, {name:"7"}]""") + """$partiql_bag::[{name:"5"}, {name:"6"}, {name:"7"}]""" + ) test( "selectImplicitAndExplicitAliasSingleSourceHoisted", @@ -315,19 +325,22 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {name:"E", price: 9.5}, {name:"F", price: 10.0}, ] - """) + """ + ) test( "syntheticColumnNameInSelect", """SELECT i+1 FROM <<100>> i""", - """$partiql_bag::[{_1: 101}]""") + """$partiql_bag::[{_1: 101}]""" + ) test( "properAliasFromPathInSelect", """ SELECT s.id, s.books[1].title FROM stores AS s WHERE s.id = '5' """, - """$partiql_bag::[ { id: "5", title: "B" } ] """) + """$partiql_bag::[ { id: "5", title: "B" } ] """ + ) test( "selectListWithMissing", @@ -354,7 +367,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {name: "Lilikoi", type: "unicorn", id: "cat", is_magic: false}, {name: "Lilikoi", type: "unicorn", id: "unicorn", is_magic: true}, ] - """) + """ + ) } group("select-where") { test( @@ -364,14 +378,16 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { $partiql_bag::[ {name: "Kumo", type: "dog"} ] - """) + """ + ) test( "selectWhereStrinEqualsDifferentCase", """SELECT * FROM animals as a WHERE a.name = 'KUMO' """, """ $partiql_bag::[] - """) + """ + ) } group("select-join") { test( @@ -383,7 +399,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {name: "Mochi", type: "dog", id: "dog", is_magic: false}, {name: "Lilikoi", type: "unicorn", id: "unicorn", is_magic: true}, ] - """) + """ + ) test( "selectCorrelatedJoin", @@ -395,7 +412,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {id: "6", title: "E"}, {id: "6", title: "F"}, ] - """) + """ + ) test( "selectCorrelatedLeftJoin", """SELECT s.id AS id, b.title AS title FROM stores AS s LEFT CROSS JOIN @s.books AS b WHERE b IS NULL""", @@ -403,7 +421,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { $partiql_bag::[ {id: "7"} ] - """) + """ + ) test( "selectCorrelatedLeftJoinOnClause", @@ -419,7 +438,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {id: "6", title: "F"}, {id: "7"} ] - """) + """ + ) test( "selectJoinOnClauseScoping", @@ -438,7 +458,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { [3, 1, null], [3, 2, null], ] - """) + """ + ) test( "selectNonCorrelatedJoin", @@ -450,7 +471,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {id: "6", title: "hello"}, {id: "7", title: "hello"}, ] - """) + """ + ) test( "selectCorrelatedUnpivot", @@ -466,7 +488,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {n1: "b", n2: "c", n3: "d", n4: "e", val: 5}, {n1: "b", n2: "c", n3: "d", n4: "f", val: 6} ] - """) + """ + ) test( "nestedSelectJoinWithUnpivot", @@ -490,7 +513,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {col: "type", val: "unicorn"}, {col: "is_magic", val: true}, ] - """) + """ + ) test( "nestedSelectJoinLimit", @@ -507,11 +531,13 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {col: "type", val: "dog"}, {col: "is_magic", val: false}, ] - """) + """ + ) test( "correlatedJoinWithShadowedAttributes", """SELECT VALUE v FROM 
`[{v:5}]` AS item, @item.v AS v""", - """$partiql_bag::[5]""") + """$partiql_bag::[5]""" + ) test( "correlatedJoinWithoutLexicalScope", @@ -523,7 +549,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { "joinWithShadowedGlobal", // 'a' is a global variable """SELECT VALUE b FROM `[{b:5}]` AS a, a.b AS b""", - """$partiql_bag::[{c:{d:{e:5, f:6}}}]""") + """$partiql_bag::[{c:{d:{e:5, f:6}}}]""" + ) } group("pivot") { test( @@ -537,7 +564,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { Mochi: "dog", Lilikoi: "unicorn", } - """) + """ + ) test( "pivotLiteralFieldNameFrom", @@ -550,7 +578,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { name: "Mochi", name: "Lilikoi", } - """) + """ + ) test( "pivotBadFieldType", @@ -559,7 +588,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { """, """ {} - """) + """ + ) test( "pivotUnpivotWithWhereLimit", @@ -575,7 +605,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { new_d: 4, new_e: 5, } - """) + """ + ) } group("in") { @@ -584,56 +615,64 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price IN (5, `2e0`) """, - """$partiql_bag::["A", "B", "A"]""") + """$partiql_bag::["A", "B", "A"]""" + ) test( "inPredicateSingleItem", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price IN (5) """, - """$partiql_bag::[ "A", "A" ]""") + """$partiql_bag::[ "A", "A" ]""" + ) test( "inPredicateSingleExpr", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price IN 5 """, - "$partiql_bag::[]") + "$partiql_bag::[]" + ) test( "inPredicateSingleItemListVar", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price IN (prices) """, - """$partiql_bag::[]""") + """$partiql_bag::[]""" + ) test( "inPredicateSingleListVar", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price IN prices """, - """$partiql_bag::[ "A", "B", "A" ]""") + """$partiql_bag::[ "A", "B", "A" ]""" + ) test( "inPredicateSubQuerySelectValue", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price IN (SELECT VALUE p FROM prices AS p) """, - """$partiql_bag::[ "A", "B", "A" ]""") + """$partiql_bag::[ "A", "B", "A" ]""" + ) test( "notInPredicate", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price NOT IN (5, `2e0`) """, - """$partiql_bag::["C", "D", "E", "F" ] """) + """$partiql_bag::["C", "D", "E", "F" ] """ + ) test( "notInPredicateSingleItem", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price NOT IN (5) """, - """$partiql_bag::[ "B", "C", "D", "E", "F" ]""") + """$partiql_bag::[ "B", "C", "D", "E", "F" ]""" + ) test( "notInPredicateSingleExpr", @@ -656,17 +695,20 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "notInPredicateSingleItemListVar", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price NOT IN (prices) """, - """ $partiql_bag::[ "A", "B", "C", "D", "A", "E", "F" ] """) + """ $partiql_bag::[ "A", "B", "C", "D", "A", "E", "F" ] """ + ) test( "notInPredicateSingleListVar", """ SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price NOT IN prices """, - """ $partiql_bag::[ "C", "D", "E", "F" ] """) + """ $partiql_bag::[ "C", "D", "E", "F" ] """ + ) test( "notInPredicateSubQuerySelectValue", "SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE b.price NOT IN (SELECT VALUE p FROM prices AS p)", - """ $partiql_bag::[ 
"C", "D", "E", "F" ] """) + """ $partiql_bag::[ "C", "D", "E", "F" ] """ + ) test( "inPredicateWithTableConstructor", @@ -674,7 +716,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE (b.title, b.price) IN (VALUES ('A', `5e0`), ('B', 3.0), ('X', 9.0)) """, - """ $partiql_bag::[ "A", "A" ] """) + """ $partiql_bag::[ "A", "A" ] """ + ) test( "notInPredicateWithTableConstructor", @@ -682,7 +725,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE (b.title, b.price) NOT IN (VALUES ('A', `5e0`), ('B', 3.0), ('X', 9.0)) """, - """ $partiql_bag::[ "B", "C", "D", "E", "F" ] """) + """ $partiql_bag::[ "B", "C", "D", "E", "F" ] """ + ) test( "inPredicateWithExpressionOnRightSide", @@ -690,7 +734,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE 'comedy' IN b.categories """, - """ $partiql_bag::[ "B", "E" ] """) + """ $partiql_bag::[ "B", "E" ] """ + ) test( "notInPredicateWithExpressionOnRightSide", @@ -698,7 +743,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { SELECT VALUE b.title FROM stores[*].books[*] AS b WHERE 'comedy' NOT IN b.categories """, - """ $partiql_bag::[ "A", "C", "D", "A", "F" ] """) + """ $partiql_bag::[ "A", "C", "D", "A", "F" ] """ + ) } group("case") { test( @@ -717,7 +763,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { """, """ $partiql_bag::[ "TWO", "THREE", "?", "?", "?" ] - """) + """ + ) test( "simpleCaseNoElse", @@ -732,7 +779,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { END FROM << i, f, d, null, missing >> AS x """, - """ $partiql_bag::[ "TWO", "THREE", null, null, null ] """) + """ $partiql_bag::[ "TWO", "THREE", null, null, null ] """ + ) test( "searchedCase", @@ -746,7 +794,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { END FROM << -1.0000, i, f, d, 100e0, null, missing >> AS x """, - """ $partiql_bag::[ "< ONE", "TWO", "?", ">= THREE < 100", "?", "?", "?" ] """) + """ $partiql_bag::[ "< ONE", "TWO", "?", ">= THREE < 100", "?", "?", "?" 
] """ + ) test( "searchedCaseNoElse", @@ -759,7 +808,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { END FROM << -1.0000, i, f, d, 100e0, null, missing >> AS x """, - """ $partiql_bag::[ "< ONE", "TWO", null, ">= THREE < 100", null, null, null ] """) + """ $partiql_bag::[ "< ONE", "TWO", null, ">= THREE < 100", null, null, null ] """ + ) } group("between") { test( @@ -769,7 +819,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM << -1.0000, i, f, d, 100e0 >> AS x WHERE x BETWEEN 1.000001 AND 3.0000000 """, - """ $partiql_bag::[ 2e0, 3d0 ] """) + """ $partiql_bag::[ 2e0, 3d0 ] """ + ) test( "notBetweenPredicate", @@ -778,7 +829,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM << -1.0000, i, f, d, 100e0 >> AS x WHERE x NOT BETWEEN 1.000001 AND 3.0000000 """, - """$partiql_bag::[ -1.0000, 1, 100d0 ] """) + """$partiql_bag::[ -1.0000, 1, 100d0 ] """ + ) test( "betweenStringsPredicate", @@ -787,7 +839,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM << 'APPLE', 'AZURE', 'B', 'XZ', 'ZOE', 'YOYO' >> AS x WHERE x BETWEEN 'B' AND 'Y' """, - """ $partiql_bag::[ "B", "XZ" ] """) + """ $partiql_bag::[ "B", "XZ" ] """ + ) test( "notBetweenStringsPredicate", @@ -796,104 +849,124 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM << 'APPLE', 'AZURE', 'B', 'XZ', 'Z', 'ZOE', 'YOYO' >> AS x WHERE x NOT BETWEEN 'B' AND 'Y' """, - """ $partiql_bag::[ "APPLE", "AZURE", "Z", "ZOE", "YOYO" ] """) + """ $partiql_bag::[ "APPLE", "AZURE", "Z", "ZOE", "YOYO" ] """ + ) } group("aggregates") { test( "topLevelCountDistinct", """COUNT(DISTINCT [1,1,1,1,2])""", - """2""") + """2""" + ) test( "topLevelCount", """COUNT(numbers)""", - """5""") + """5""" + ) test( "topLevelAllCount", """COUNT(ALL numbers)""", - """5""") + """5""" + ) test( "topLevelSum", """SUM(numbers)""", - """15.0""") + """15.0""" + ) test( "topLevelAllSum", """SUM(ALL numbers)""", - """15.0""") + """15.0""" + ) test( "topLevelDistinctSum", """SUM(DISTINCT [1,1,1,1,1,1,1,2])""", - """3""") + """3""" + ) test( "topLevelMin", """MIN(numbers)""", - """1""") + """1""" + ) test( "topLevelDistinctMin", """MIN(DISTINCT numbers)""", - """1""") + """1""" + ) test( "topLevelAllMin", """MIN(ALL numbers)""", - """1""") + """1""" + ) test( "topLevelMax", """MAX(numbers)""", - """5d0""") + """5d0""" + ) test( "topLevelDistinctMax", """MAX(DISTINCT numbers)""", - """5d0""") + """5d0""" + ) test( "topLevelAllMax", """MAX(ALL numbers)""", - """5d0""") + """5d0""" + ) test( "topLevelAvg", """AVG(numbers)""", - """3.0""") + """3.0""" + ) test( "topLevelDistinctAvg", """AVG(DISTINCT [1,1,1,1,1,3])""", - """2.""") + """2.""" + ) // AVG of integers should be of type DECIMAL. 
test( "topLevelAvgOnlyInt", """AVG([2,2,2,4])""", - """2.5""") + """2.5""" + ) test( "selectValueAggregate", // SELECT VALUE does not do legacy aggregation """SELECT VALUE COUNT(v) + SUM(v) FROM <> AS v""", - """$partiql_bag::[20.0, 20.0]""") + """$partiql_bag::[20.0, 20.0]""" + ) test( "selectListCountStar", """SELECT COUNT(*) AS c FROM <> AS v""", - """$partiql_bag::[{c:2}]""") + """$partiql_bag::[{c:2}]""" + ) test( "selectListCountVariable", """SELECT COUNT(v) AS c FROM <> AS v""", - """$partiql_bag::[{c:2}]""") + """$partiql_bag::[{c:2}]""" + ) test( "selectListMultipleAggregates", """SELECT COUNT(*) AS c, AVG(v * 2) + SUM(v + v) AS result FROM numbers AS v""", - "$partiql_bag::[{c:5, result:36.0}]") + "$partiql_bag::[{c:5, result:36.0}]" + ) test( "selectListMultipleAggregatesNestedQuery", @@ -908,7 +981,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { $partiql_bag::[{result:31.}], $partiql_bag::[{result:33.}], $partiql_bag::[{result:35.}], - ]""") + ]""" + ) test( "aggregateInSubqueryOfSelect", @@ -919,7 +993,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM [1, 2, 3]) AS foo """, - "$partiql_bag::[{ 'cnt': 3 }]") + "$partiql_bag::[{ 'cnt': 3 }]" + ) test( "aggregateInSubqueryOfSelectValue", @@ -930,7 +1005,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM [1, 2, 3]) AS foo """, - "$partiql_bag::[3]") + "$partiql_bag::[3]" + ) test( "aggregateWithAliasingInSubqueryOfSelectValue", @@ -941,7 +1017,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM << { 'bar': 1 }, { 'bar': 2 } >> AS baz) AS foo """, - "$partiql_bag::[2]") + "$partiql_bag::[2]" + ) } group("projection iteration behavior unfiltered") { val projectionIterationUnfiltered = CompileOptions.build { projectionIteration(ProjectionIterationBehavior.UNFILTERED) } @@ -960,17 +1037,17 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { "projectionIterationBehaviorUnfiltered_select_list", "select x.someColumn from <<{'someColumn': MISSING}>> AS x", "$partiql_bag::[{someColumn: $partiql_missing::null}]", - compileOptions = projectionIterationUnfiltered) + compileOptions = projectionIterationUnfiltered + ) test( "projectionIterationBehaviorUnfiltered_select_star", "select * from <<{'someColumn': MISSING}>>", "$partiql_bag::[{someColumn: $partiql_missing::null}]", - compileOptions = projectionIterationUnfiltered) + compileOptions = projectionIterationUnfiltered + ) } - - group("ordered names") { test( "wildcardOrderedNames", @@ -979,7 +1056,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { ) { exprValue, _ -> Assert.assertNull( "Ordering of the fields should not be known when '*' is used", - exprValue.first().orderedNames) + exprValue.first().orderedNames + ) } test( @@ -989,7 +1067,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { ) { exprValue, _ -> Assert.assertNull( "Ordering of the fields should not be known when 'alias.*' is used", - exprValue.first().orderedNames) + exprValue.first().orderedNames + ) } test( @@ -999,7 +1078,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { ) { exprValue, _ -> Assert.assertNull( "Ordering of the fields should not be known when an 'alias.*' is used", - exprValue.first().orderedNames) + exprValue.first().orderedNames + ) } test( @@ -1010,7 +1090,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { Assert.assertEquals( "Ordering of 
the fields should be known when no wildcards are used", listOf("a", "b"), - exprValue.first().orderedNames) + exprValue.first().orderedNames + ) } } @@ -1018,22 +1099,26 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "selectDistinct", """SELECT DISTINCT t.a FROM `[{a: 1}, {a: 2}, {a: 1}]` t""", - """$partiql_bag::[{a: 1}, {a: 2}] """) + """$partiql_bag::[{a: 1}, {a: 2}] """ + ) test( "selectDistinctWithAggregate", """SELECT SUM(DISTINCT t.a) AS a FROM `[{a:10}, {a:1}, {a:10}, {a:3}]` t""", - "$partiql_bag::[{a:14}]") + "$partiql_bag::[{a:14}]" + ) test( "selectDistinctSubQuery", """SELECT * FROM (SELECT DISTINCT t.a FROM `[{a: 1}, {a: 2}, {a: 1}]` t)""", - """$partiql_bag::[{a:1},{a:2}]""") + """$partiql_bag::[{a:1},{a:2}]""" + ) test( "selectDistinctWithSubQuery", """SELECT DISTINCT * FROM (SELECT t.a FROM `[{a: 1}, {a: 2}, {a: 1}]` t)""", - """$partiql_bag::[{a:1},{a:2}]""") + """$partiql_bag::[{a:1},{a:2}]""" + ) test( "selectDistinctAggregationWithGroupBy", @@ -1042,7 +1127,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM `[{a:1, b:10}, {a:1, b:10}, {a:1, b:20}, {a:2, b:10}, {a:2, b:10}]` t GROUP by t.a """, - """$partiql_bag::[{a:1, c:2}, {a:2, c:1}]""") + """$partiql_bag::[{a:1, c:2}, {a:2, c:1}]""" + ) test( "selectDistinctWithGroupBy", @@ -1051,7 +1137,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { FROM `[{a:1, b:10}, {a:1, b:10}, {a:1, b:20}, {a:2, b:10}, {a:2, b:10}]` t GROUP by t.a """, - """$partiql_bag::[{a:1, c:3}, {a:2, c:2}]""") + """$partiql_bag::[{a:1, c:3}, {a:2, c:2}]""" + ) test( "selectDistinctWithJoin", @@ -1061,7 +1148,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { `[1, 1, 1, 1, 2]` t1, `[2, 2, 2, 2, 1]` t2 """, - """$partiql_bag::[{_1:1,_2:2}, {_1:1, _2:1}, {_1:2,_2:2}, {_1:2,_2:1}]""") + """$partiql_bag::[{_1:1,_2:2}, {_1:1, _2:1}, {_1:2,_2:2}, {_1:2,_2:1}]""" + ) test( "selectDistinctStarMixed", @@ -1074,42 +1162,50 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { MISSING, NULL, NULL, MISSING, {'a':1}, {'a':1}, {'a':2}] """, - """ $partiql_bag::[{_1:1},{_1:2},{},{_1:null},{a:1},{a:2}] """) + """ $partiql_bag::[{_1:1},{_1:2},{},{_1:null},{a:1},{a:2}] """ + ) test( "selectDistinctStarScalars", """ SELECT DISTINCT * FROM [1, 1, 2] """, - """ $partiql_bag::[{_1:1},{_1:2}] """) + """ $partiql_bag::[{_1:1},{_1:2}] """ + ) test( "selectDistinctStarStructs", """ SELECT DISTINCT * FROM [ {'a':1}, {'a':1}, {'a':2} ] """, - """ $partiql_bag::[{a:1},{a:2}] """) + """ $partiql_bag::[{a:1},{a:2}] """ + ) test( "selectDistinctStarUnknowns", "SELECT DISTINCT * FROM [MISSING, NULL, NULL, MISSING]", - """ $partiql_bag::[{}, {_1: null}] """) + """ $partiql_bag::[{}, {_1: null}] """ + ) test( "selectDistinctStarBags", "SELECT DISTINCT * FROM [ <<>>, <<>>, <<1>>, <<1>>, <<1, 2>>, <<2, 1>>, <<3, 4>>]", - "$partiql_bag::[{_1:[]}, {_1: [1]}]") + "$partiql_bag::[{_1:[]}, {_1: [1]}]" + ) test( "selectDistinctStarLists", "SELECT DISTINCT * FROM [[1], [1], [1, 2]]", - "$partiql_bag[{_1:[1]}, {_1: [1, 2]}]") + "$partiql_bag[{_1:[1]}, {_1: [1, 2]}]" + ) test( "selectDistinctStarIntegers", "SELECT DISTINCT * FROM [ 1, 1, 2 ]", - "$partiql_bag::[{_1:1},{_1:2}]") + "$partiql_bag::[{_1:1},{_1:2}]" + ) test( "selectDistinctValue", "SELECT DISTINCT VALUE t FROM [1,2,3,1,1,1,1,1] t", - "$partiql_bag::[1,2,3]") + "$partiql_bag::[1,2,3]" + ) test( "selectDistinctExpressionAndWhere", @@ -1120,7 +1216,8 @@ internal val EVALUATOR_TEST_SUITE: 
IonResultTestSuite = defineTestSuite { """, """ $partiql_bag::[{c: 2}, {c: 4}] - """) + """ + ) test( "selectDistinctExpression", @@ -1130,7 +1227,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { """, """ $partiql_bag::[{c:"11"},{c:"22"}] - """) + """ + ) } group("project various container types") { @@ -1146,14 +1244,16 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "projectOfUnpivotPath", "SELECT * FROM <<{'name': 'Marrowstone Brewing'}, {'name': 'Tesla'}>>.*", - """$partiql_bag::[{_1: $partiql_bag::[{name: "Marrowstone Brewing"}, {name: "Tesla"}]}]""") + """$partiql_bag::[{_1: $partiql_bag::[{name: "Marrowstone Brewing"}, {name: "Tesla"}]}]""" + ) } group("misc") { test( "parameters", """SELECT ? as b1, f.bar FROM parameterTestTable f WHERE f.bar = ?""", - """$partiql_bag::[{b1:"spam",bar:"baz"}]""") + """$partiql_bag::[{b1:"spam",bar:"baz"}]""" + ) } group("floatN") { @@ -1167,43 +1267,51 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "unpivotMissing", "SELECT * FROM UNPIVOT MISSING", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) test( "unpivotEmptyStruct", "SELECT * FROM UNPIVOT {}", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) EvaluatorTestCase( "unpivotStructWithMissingField", "test * FROM UNPIVOT { 'a': MISSING }", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) test( "unpivotMissingWithAsAndAt", "SELECT unnestIndex, unnestValue FROM UNPIVOT MISSING AS unnestValue AT unnestIndex", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) test( "unpivotMissingCrossJoinWithAsAndAt", "SELECT unnestIndex, unnestValue FROM MISSING, UNPIVOT MISSING AS unnestValue AT unnestIndex", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) // double unpivots with wildcard paths test( "pathUnpivotEmptyStruct1", "{}.*.*.bar", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) test( "pathUnpivotEmptyStruct2", "{}.*.bar.*", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) test( "pathUnpivotEmptyStruct3", "{}.*.bar.*.baz", - "$partiql_bag::[]") + "$partiql_bag::[]" + ) } group("uncategorized") { @@ -1211,17 +1319,20 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { "variableShadow", // Note that i, f, d, and s are defined in the global environment "SELECT f, d, s FROM i AS f, f AS d, @f AS s WHERE f = 1 AND d = 2e0 and s = 1", - "$partiql_bag::[{f: 1, d: 2e0, s: 1}]") + "$partiql_bag::[{f: 1, d: 2e0, s: 1}]" + ) test( "selectValueStructConstructorWithMissing", """SELECT VALUE {'x': a.x, 'y': a.y} FROM `[{x:5}, {y:6}]` AS a""", - """$partiql_bag::[{x:5}, {y:6}]""") + """$partiql_bag::[{x:5}, {y:6}]""" + ) test( "selectIndexStruct", "SELECT VALUE x[0] FROM (SELECT s.id FROM stores AS s) AS x", - """$partiql_bag::["5", "6", "7"]""") + """$partiql_bag::["5", "6", "7"]""" + ) test( "selectStarSingleSource", @@ -1232,7 +1343,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { {name: "Mochi", type: "dog"}, {name: "Lilikoi", type: "unicorn"}, ] - """) + """ + ) test( "implicitAliasSelectSingleSource", "SELECT id FROM stores", @@ -1250,7 +1362,8 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { test( "explicitAliasSelectSingleSourceWithWhere", """SELECT id AS name FROM stores WHERE id = '5' """, - """$partiql_bag::[{name:"5"}]""") + """$partiql_bag::[{name:"5"}]""" + ) // Demonstrates that UndefinedVariableBehavior.ERROR does not affect qualified field names. 
test( @@ -1268,20 +1381,23 @@ internal val EVALUATOR_TEST_SUITE: IonResultTestSuite = defineTestSuite { group("regression") { // https://github.com/partiql/partiql-lang-kotlin/issues/314 // Ensures that datetime parts can be used as variable names. - test("dateTimePartsAsVariableNames", + test( + "dateTimePartsAsVariableNames", """ SELECT VALUE [year, month, day, hour, minute, second] FROM 1968 AS year, 4 AS month, 3 as day, 12 as hour, 31 as minute, 59 as second """, - "$partiql_bag::[[1968, 4, 3, 12, 31, 59]]") + "$partiql_bag::[[1968, 4, 3, 12, 31, 59]]" + ) // https://github.com/partiql/partiql-lang-kotlin/issues/314 // Ensures that datetime parts can be used as variable names. - test("dateTimePartsAsStructFieldNames", + test( + "dateTimePartsAsStructFieldNames", """ SELECT VALUE [x.year, x.month, x.day, x.hour, x.minute, x.second] FROM << { 'year': 1968, 'month': 4, 'day': 3, 'hour': 12, 'minute': 31, 'second': 59 }>> AS x """, - "$partiql_bag::[[1968, 4, 3, 12, 31, 59]]") + "$partiql_bag::[[1968, 4, 3, 12, 31, 59]]" + ) } } - diff --git a/lang/test/org/partiql/lang/eval/EvaluatorTests.kt b/lang/test/org/partiql/lang/eval/EvaluatorTests.kt index d0c83bfe3a..3515735945 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatorTests.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatorTests.kt @@ -42,12 +42,14 @@ class EvaluatorTests { return unskippedTests.map { it.copy( note = "legacy typing", - compileOptions = CompileOptions.build(it.compileOptions) { typingMode(TypingMode.LEGACY) }) + compileOptions = CompileOptions.build(it.compileOptions) { typingMode(TypingMode.LEGACY) } + ) } + unskippedTests.map { it.copy( note = "permissive typing", - compileOptions = CompileOptions.build(it.compileOptions) { typingMode(TypingMode.PERMISSIVE) }) + compileOptions = CompileOptions.build(it.compileOptions) { typingMode(TypingMode.PERMISSIVE) } + ) } } } @@ -56,4 +58,3 @@ class EvaluatorTests { @MethodSource("evaluatorTests") fun allTests(tc: IonResultTestCase) = tc.runTestCase(valueFactory, mockDb) } - diff --git a/lang/test/org/partiql/lang/eval/ExceptionWrappingTest.kt b/lang/test/org/partiql/lang/eval/ExceptionWrappingTest.kt index 25160f864e..3b1fef82b6 100644 --- a/lang/test/org/partiql/lang/eval/ExceptionWrappingTest.kt +++ b/lang/test/org/partiql/lang/eval/ExceptionWrappingTest.kt @@ -45,7 +45,7 @@ class ExceptionWrappingTest { thunkOptions( ThunkOptions.build { handleExceptionForLegacyMode { ex, srcLocation -> - if(ex is SemanticException) { + if (ex is SemanticException) { DEFAULT_EXCEPTION_HANDLER_FOR_LEGACY_MODE(ex, srcLocation) } else { throw ex diff --git a/lang/test/org/partiql/lang/eval/ExprValueFactoryTest.kt b/lang/test/org/partiql/lang/eval/ExprValueFactoryTest.kt index 599f5d88fe..0c46fbb06d 100644 --- a/lang/test/org/partiql/lang/eval/ExprValueFactoryTest.kt +++ b/lang/test/org/partiql/lang/eval/ExprValueFactoryTest.kt @@ -25,10 +25,10 @@ import org.junit.Test import org.junit.runner.RunWith import org.partiql.lang.errors.ErrorCode import org.partiql.lang.eval.time.Time -import org.partiql.lang.util.seal -import java.math.BigDecimal import org.partiql.lang.util.isBag import org.partiql.lang.util.isMissing +import org.partiql.lang.util.seal +import java.math.BigDecimal import java.time.LocalDate import java.time.LocalTime import java.time.OffsetTime @@ -57,7 +57,7 @@ class ExprValueFactoryTest { fun parametersForExprValueFactoryTest() = listOf( TestCase(ExprValueType.BOOL, true, ion.newBool(true), factory.newBoolean(true)), TestCase(ExprValueType.BOOL, false, ion.newBool(false), 
factory.newBoolean(false)), - TestCase(ExprValueType.INT, 100L, ion.newInt(100), factory.newInt(100)), //<--Int converted to Long + TestCase(ExprValueType.INT, 100L, ion.newInt(100), factory.newInt(100)), // <--Int converted to Long TestCase(ExprValueType.INT, 101L, ion.newInt(101), factory.newInt(101L)), TestCase(ExprValueType.FLOAT, 103.0, ion.newFloat(103.0), factory.newFloat(103.0)), TestCase(ExprValueType.DECIMAL, BigDecimal(104), ion.newDecimal(BigDecimal(104)), factory.newDecimal(104)), @@ -98,31 +98,31 @@ class ExprValueFactoryTest { ExprValueType.STRUCT, ExprValueType.SEXP, ExprValueType.LIST, - ExprValueType.BAG -> { + ExprValueType.BAG -> { assertScalarEmpty(tc.value) } - ExprValueType.BOOL -> { + ExprValueType.BOOL -> { assertEquals(expectedValue, tc.value.scalar.booleanValue()) assertNull(tc.value.scalar.numberValue()) assertNull(tc.value.scalar.stringValue()) assertNull(tc.value.scalar.bytesValue()) assertNull(tc.value.scalar.timestampValue()) } - ExprValueType.INT -> { + ExprValueType.INT -> { assertNull(tc.value.scalar.booleanValue()) assertEquals(expectedValue, tc.value.scalar.numberValue()) assertNull(tc.value.scalar.stringValue()) assertNull(tc.value.scalar.bytesValue()) assertNull(tc.value.scalar.timestampValue()) } - ExprValueType.FLOAT -> { + ExprValueType.FLOAT -> { assertNull(tc.value.scalar.booleanValue()) assertEquals(expectedValue, tc.value.scalar.numberValue()) assertNull(tc.value.scalar.stringValue()) assertNull(tc.value.scalar.bytesValue()) assertNull(tc.value.scalar.timestampValue()) } - ExprValueType.DECIMAL -> { + ExprValueType.DECIMAL -> { assertNull(tc.value.scalar.booleanValue()) assertEquals(expectedValue, tc.value.scalar.numberValue()) assertNull(tc.value.scalar.stringValue()) @@ -136,28 +136,28 @@ class ExprValueFactoryTest { assertNull(tc.value.scalar.bytesValue()) assertEquals(expectedValue, tc.value.scalar.timestampValue()) } - ExprValueType.SYMBOL -> { + ExprValueType.SYMBOL -> { assertNull(tc.value.scalar.booleanValue()) assertNull(tc.value.scalar.numberValue()) assertEquals(expectedValue, tc.value.scalar.stringValue()) assertNull(tc.value.scalar.bytesValue()) assertNull(tc.value.scalar.timestampValue()) } - ExprValueType.STRING -> { + ExprValueType.STRING -> { assertNull(tc.value.scalar.booleanValue()) assertNull(tc.value.scalar.numberValue()) assertEquals(expectedValue, tc.value.scalar.stringValue()) assertNull(tc.value.scalar.bytesValue()) assertNull(tc.value.scalar.timestampValue()) } - ExprValueType.CLOB -> { + ExprValueType.CLOB -> { assertNull(tc.value.scalar.booleanValue()) assertNull(tc.value.scalar.numberValue()) assertNull(tc.value.scalar.stringValue()) assertEquals(expectedValue, tc.value.scalar.bytesValue()) assertNull(tc.value.scalar.timestampValue()) } - ExprValueType.BLOB -> { + ExprValueType.BLOB -> { assertNull(tc.value.scalar.booleanValue()) assertNull(tc.value.scalar.numberValue()) assertNull(tc.value.scalar.stringValue()) @@ -243,7 +243,7 @@ class ExprValueFactoryTest { assertEquals(tc.expectedIonValue, tc.value.ionValue) val fromIonValue = factory.newFromIonValue(tc.value.ionValue) - assertEquals(ExprValueType.BAG, fromIonValue.type) //Ion has no bag type--[bag.ionVaule] converts to a list with annotation $partiql_bag + assertEquals(ExprValueType.BAG, fromIonValue.type) // Ion has no bag type--[bag.ionVaule] converts to a list with annotation $partiql_bag assertBagValues(fromIonValue) assertEquals(fromIonValue.ionValue, tc.value.ionValue) @@ -289,13 +289,15 @@ class ExprValueFactoryTest { fun nonEmptyUnorderedStructs(): 
Array { val list = listOf( - factory.newInt(1).namedValue(factory.newSymbol("foo")), - factory.newInt(2).namedValue(factory.newSymbol("bar")), - factory.newInt(3).namedValue(factory.newSymbol("bat"))) + factory.newInt(1).namedValue(factory.newSymbol("foo")), + factory.newInt(2).namedValue(factory.newSymbol("bar")), + factory.newInt(3).namedValue(factory.newSymbol("bat")) + ) return arrayOf( - factory.newStruct(list.asSequence(), StructOrdering.UNORDERED), - factory.newStruct(list, StructOrdering.UNORDERED)) + factory.newStruct(list.asSequence(), StructOrdering.UNORDERED), + factory.newStruct(list, StructOrdering.UNORDERED) + ) } @Test @@ -314,18 +316,19 @@ class ExprValueFactoryTest { assertEquals(1L, contents.single { it.name!!.stringValue() == "foo" }.numberValue()) assertEquals(2L, contents.single { it.name!!.stringValue() == "bar" }.numberValue()) assertEquals(3L, contents.single { it.name!!.stringValue() == "bat" }.numberValue()) - } fun nonEmptyOrderedStructs(): Array { val list = listOf( - factory.newInt(1).namedValue(factory.newSymbol("foo")), - factory.newInt(2).namedValue(factory.newSymbol("bar")), - factory.newInt(3).namedValue(factory.newSymbol("bat"))) + factory.newInt(1).namedValue(factory.newSymbol("foo")), + factory.newInt(2).namedValue(factory.newSymbol("bar")), + factory.newInt(3).namedValue(factory.newSymbol("bat")) + ) return arrayOf( - factory.newStruct(list.asSequence(), StructOrdering.ORDERED), - factory.newStruct(list, StructOrdering.ORDERED)) + factory.newStruct(list.asSequence(), StructOrdering.ORDERED), + factory.newStruct(list, StructOrdering.ORDERED) + ) } @Test @@ -355,20 +358,20 @@ class ExprValueFactoryTest { try { factory.newFromIonValue(otherIonSystem.newInt(1)) fail("no exception thrown") - } catch(e: IllegalArgumentException) { + } catch (e: IllegalArgumentException) { /* intentionally left blank */ } } @Test fun serializeDeserializeMissing() { - //Deserialize - IonValue to ExprValue using newFromIonValue + // Deserialize - IonValue to ExprValue using newFromIonValue val ionValue = ion.newNull().also { it.addTypeAnnotation(MISSING_ANNOTATION) } val exprValue = factory.newFromIonValue(ionValue) assertEquals(ExprValueType.MISSING, exprValue.type) assertEquals(exprValue.ionValue, ionValue) - //Deserialize - IonValue to ExprValue using factory's missing value + // Deserialize - IonValue to ExprValue using factory's missing value val exprValueFromFactory = factory.missingValue assertEquals(ExprValueType.MISSING, exprValueFromFactory.type) @@ -385,17 +388,17 @@ class ExprValueFactoryTest { @Test fun serializeDeserializeBag() { - //Deserialize - IonValue to ExprValue using newFromIonValue + // Deserialize - IonValue to ExprValue using newFromIonValue val ionValue = ion.newList(ion.newInt(1), ion.newInt(2), ion.newInt(3)).also { it.addTypeAnnotation(BAG_ANNOTATION) } val exprValue = factory.newFromIonValue(ionValue) assertEquals(ExprValueType.BAG, exprValue.type) assertEquals(exprValue.ionValue, ionValue) - //Deserialize - IonValue to ExprValue using newBag, newBag adds $partiql_bag annotation to the list + // Deserialize - IonValue to ExprValue using newBag, newBag adds $partiql_bag annotation to the list val exprValueFromFactory = factory.newBag(listOf(factory.newInt(1), factory.newInt(2), factory.newInt(3)).asSequence()) assertEquals(ExprValueType.BAG, exprValueFromFactory.type) - //Serialize - ExprValue to IonValue using ionValue by lazy + // Serialize - ExprValue to IonValue using ionValue by lazy assertTrue(exprValueFromFactory.ionValue.isBag) // Ensure 
round trip doesn't add the annotation if it already has $partiql_bag annotation @@ -438,7 +441,7 @@ class ExprValueFactoryTest { fun genericTimeExprValueTest2() { val timeExprValue = factory.newTime(Time.of(23, 2, 29, 23, 2, -720)) assertEquals( - expected = OffsetTime.of(23, 2, 29, 0, ZoneOffset.ofTotalSeconds(-720*60)), + expected = OffsetTime.of(23, 2, 29, 0, ZoneOffset.ofTotalSeconds(-720 * 60)), actual = timeExprValue.scalar.timeValue()!!.offsetTime, message = "Expected values to be equal." ) diff --git a/lang/test/org/partiql/lang/eval/JoinWithOnConditionTest.kt b/lang/test/org/partiql/lang/eval/JoinWithOnConditionTest.kt index 8f58be13f2..394a4eef07 100644 --- a/lang/test/org/partiql/lang/eval/JoinWithOnConditionTest.kt +++ b/lang/test/org/partiql/lang/eval/JoinWithOnConditionTest.kt @@ -3,18 +3,17 @@ package org.partiql.lang.eval import junitparams.Parameters import org.junit.Test - class JoinWithOnConditionTest : EvaluatorTestBase() { val sessionNoNulls = mapOf( - "t1" to """ + "t1" to """ [ {id: 1, val:"a"}, {id: 2, val:"b"}, {id: 3, val:"c"}, ] """, - "t2" to """ + "t2" to """ [ {id: 1, val: 10}, {id: 2, val: 20}, @@ -24,14 +23,14 @@ class JoinWithOnConditionTest : EvaluatorTestBase() { ).toSession() val sessionNullIdRow = mapOf( - "t1" to """ + "t1" to """ [ {id: 1, val:"a"}, {id: 2, val:"b"}, {id: 3, val:"c"}, ] """, - "t2" to """ + "t2" to """ [ {id: 1, val: 10}, {id: null, val: 20}, @@ -41,28 +40,27 @@ class JoinWithOnConditionTest : EvaluatorTestBase() { ).toSession() val sessionNullTable: EvaluationSession = mapOf( - "t1" to """ + "t1" to """ [ {id: 1, val:"a"}, {id: 2, val:"b"}, {id: 3, val:"c"}, ] """, - "t2" to """ null """ + "t2" to """ null """ ).toSession() val sessionNullTableRow: EvaluationSession = mapOf( - "t1" to """ + "t1" to """ [ {id: 1, val:"a"}, {id: 2, val:"b"}, {id: 3, val:"c"}, ] """, - "t2" to """[ null ]""" + "t2" to """[ null ]""" ).toSession() - val sqlUnderTest = """ SELECT t1.id AS id, t1.val AS val1, @@ -71,49 +69,66 @@ class JoinWithOnConditionTest : EvaluatorTestBase() { """ private val session = mapOf( - "A" to "[ { 'n': 1 }, { 'n': 3 } ]", - "B" to "[ { 'n': 1 }, { 'n': 2 }, { 'n': 3 } ]", - "C" to "[ { 'n': 2 }, { 'n': 3 } ]").toSession() + "A" to "[ { 'n': 1 }, { 'n': 3 } ]", + "B" to "[ { 'n': 1 }, { 'n': 2 }, { 'n': 3 } ]", + "C" to "[ { 'n': 2 }, { 'n': 3 } ]" + ).toSession() @Test @Parameters fun joinWithOnConditionTest(pair: Pair): Unit = - runTestCaseInLegacyAndPermissiveModes(pair.first, pair.second) + runTestCaseInLegacyAndPermissiveModes(pair.first, pair.second) fun parametersForJoinWithOnConditionTest(): List> { return listOf( - Pair(EvaluatorTestCase( - "JOIN ON with no nulls", - sqlUnderTest, - """ + Pair( + EvaluatorTestCase( + "JOIN ON with no nulls", + sqlUnderTest, + """ << {'id':1, 'val1':'a', 'val2':10}, {'id':2, 'val1':'b', 'val2':20}, {'id':3, 'val1':'c', 'val2':30} >> - """), sessionNoNulls), - Pair(EvaluatorTestCase( - "JOIN ON with no nulls", - sqlUnderTest, """ + ), + sessionNoNulls + ), + Pair( + EvaluatorTestCase( + "JOIN ON with no nulls", + sqlUnderTest, + """ << {'id':1, 'val1':'a', 'val2':10}, {'id':3, 'val1':'c', 'val2':30} >> - """), sessionNullIdRow), - Pair(EvaluatorTestCase( - "JOIN ON with no nulls", - sqlUnderTest, """ + ), + sessionNullIdRow + ), + Pair( + EvaluatorTestCase( + "JOIN ON with no nulls", + sqlUnderTest, + """ <<>> - """), sessionNullTable), - Pair(EvaluatorTestCase( - "JOIN ON with no nulls", - sqlUnderTest, """ + ), + sessionNullTable + ), + Pair( + EvaluatorTestCase( + "JOIN ON with no 
nulls", + sqlUnderTest, + """ <<>> - """), sessionNullTableRow) + """ + ), + sessionNullTableRow + ) ) } @@ -136,4 +151,4 @@ class JoinWithOnConditionTest : EvaluatorTestBase() { ) runTestCase(testCase, session) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/LikePredicateTest.kt b/lang/test/org/partiql/lang/eval/LikePredicateTest.kt index 0161aaa159..8ce8244020 100644 --- a/lang/test/org/partiql/lang/eval/LikePredicateTest.kt +++ b/lang/test/org/partiql/lang/eval/LikePredicateTest.kt @@ -23,35 +23,41 @@ import kotlin.test.assertFailsWith class LikePredicateTest : EvaluatorTestBase() { - private val animals = mapOf("animals" to """ + private val animals = mapOf( + "animals" to """ [ {name: "Kumo", type: "dog"}, {name: "Mochi", type: "dog"}, {name: "Lilikoi", type: "unicorn"}, ] - """).toSession() + """ + ).toSession() - private val animalsWithNulls = mapOf("animalsWithNulls" to """ + private val animalsWithNulls = mapOf( + "animalsWithNulls" to """ [ {name: null, type: "dog"}, {name: null, type: "dog"}, {name: null, type: "unicorn"}, ] - """).toSession() - + """ + ).toSession() @Test fun emptyTextUnderscorePattern() = assertEval("""SELECT * FROM `[true]` as a WHERE '' LIKE '_' """, "[]", animals) @Test - fun emptyTextPercentPattern() = assertEval("""SELECT * FROM `[true]` as a WHERE '' LIKE '%' """, "[{_1: true}]", - animals) - + fun emptyTextPercentPattern() = assertEval( + """SELECT * FROM `[true]` as a WHERE '' LIKE '%' """, "[{_1: true}]", + animals + ) @Test - fun allLiteralsAndEscapeIsNull() = assertEval("""SELECT * FROM animals as a WHERE 'A' LIKE 'B' ESCAPE null """, - "[]", - animals) + fun allLiteralsAndEscapeIsNull() = assertEval( + """SELECT * FROM animals as a WHERE 'A' LIKE 'B' ESCAPE null """, + "[]", + animals + ) @Test fun valueLiteralPatternNull() = assertEval("""SELECT * FROM animals as a WHERE 'A' LIKE null """, "[]", animals) @@ -60,35 +66,43 @@ class LikePredicateTest : EvaluatorTestBase() { fun valueNullPatternLiteral() = assertEval("""SELECT * FROM animals as a WHERE null LIKE 'A' """, "[]", animals) @Test - fun valueNullPatternLiteralEscapeNull() = assertEval("""SELECT * FROM animals as a WHERE null LIKE 'A' ESCAPE null""", - "[]", - animals) + fun valueNullPatternLiteralEscapeNull() = assertEval( + """SELECT * FROM animals as a WHERE null LIKE 'A' ESCAPE null""", + "[]", + animals + ) @Test - fun valueNullPatternNullEscapeLiteral() = assertEval("""SELECT * FROM animals as a WHERE null LIKE null ESCAPE '['""", - "[]", - animals) + fun valueNullPatternNullEscapeLiteral() = assertEval( + """SELECT * FROM animals as a WHERE null LIKE null ESCAPE '['""", + "[]", + animals + ) @Test - fun valueLiteralPatternNullEscapeNull() = assertEval("""SELECT * FROM animals as a WHERE 'A' LIKE null ESCAPE null""", - "[]", - animals) + fun valueLiteralPatternNullEscapeNull() = assertEval( + """SELECT * FROM animals as a WHERE 'A' LIKE null ESCAPE null""", + "[]", + animals + ) @Test - fun valueNullPatternNullEscapeNull() = assertEval("""SELECT * FROM animals as a WHERE null LIKE null ESCAPE null""", - "[]", - animals) + fun valueNullPatternNullEscapeNull() = assertEval( + """SELECT * FROM animals as a WHERE null LIKE null ESCAPE null""", + "[]", + animals + ) @Test fun typeIsChecked() { // Specify the types we'll test - data class ParamType(val precedence : Int) + data class ParamType(val precedence: Int) val NULL = ParamType(1) val INT = ParamType(2) // will throw error val STR = ParamType(3) // references are deferred to runtime and take a separate 
compile path than literals - data class Param(val param : String, val type : ParamType, val escParam : String = param) + data class Param(val param: String, val type: ParamType, val escParam: String = param) val types = listOf( Param("null", NULL), Param("a._null_", NULL), @@ -99,13 +113,13 @@ class LikePredicateTest : EvaluatorTestBase() { ) // Run the test with the given parameters - fun runTest(whereClause : String, softly : SoftAssertions, vararg types : Param) { + fun runTest(whereClause: String, softly: SoftAssertions, vararg types: Param) { val input = """[{num: 1, str: "string", esc: "\\"}]""" val session = mapOf("Object" to input).toSession() val query = "Select * From Object a Where " + whereClause softly.assertThatCode { - when (types.map{ it.type }.minBy{ it.precedence }) { + when (types.map { it.type }.minBy { it.precedence }) { NULL -> assertEval(query, "[]", session) INT -> { val ex = assertFailsWith(message = query) { @@ -130,447 +144,539 @@ class LikePredicateTest : EvaluatorTestBase() { } @Test - fun textAndPatternEmpty() = assertEval(""" SELECT * FROM animals WHERE '' LIKE '' """, """ + fun textAndPatternEmpty() = assertEval( + """ SELECT * FROM animals WHERE '' LIKE '' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] - """, animals) + """, + animals + ) @Test - fun textNonEmptyPatternEmpty() = assertEval(""" SELECT * FROM animals WHERE 'Kumo' LIKE '' """, """ + fun textNonEmptyPatternEmpty() = assertEval( + """ SELECT * FROM animals WHERE 'Kumo' LIKE '' """, + """ [] - """, animals) - + """, + animals + ) @Test - fun noEscapeAllArgsLiteralsMatches() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Kumo' """, """ + fun noEscapeAllArgsLiteralsMatches() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Kumo' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] - """, animals) + """, + animals + ) @Test - fun noEscapeAllArgsLiteralsMismatchCase() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'KuMo' """, - """ + fun noEscapeAllArgsLiteralsMismatchCase() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'KuMo' """, + """ [] """, - animals) - + animals + ) @Test - fun noEscapeAllArgsLiteralsMismatchPattern() = assertEval("""SELECT * FROM animals as a WHERE 'xxx' LIKE 'Kumo' """, - """ + fun noEscapeAllArgsLiteralsMismatchPattern() = assertEval( + """SELECT * FROM animals as a WHERE 'xxx' LIKE 'Kumo' """, + """ [] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatchUnderscore() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K_mo' """, - """ + fun noEscapeAllArgsLiteralsMatchUnderscore() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K_mo' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsNoMatchUnderscore() = assertEval("""SELECT * FROM animals as a WHERE 'Kuumo' LIKE 'K_mo' """, - """ + fun noEscapeAllArgsLiteralsNoMatchUnderscore() = assertEval( + """SELECT * FROM animals as a WHERE 'Kuumo' LIKE 'K_mo' """, + """ [] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsNoMatchUnderscoreExtraChar() = assertEval("""SELECT * FROM animals as a WHERE 'KKumo' LIKE 'K_mo' """, - """ + fun noEscapeAllArgsLiteralsNoMatchUnderscoreExtraChar() = assertEval( + """SELECT * FROM animals as a WHERE 'KKumo' LIKE 'K_mo' """, + """ [] """, - animals) + 
animals + ) @Test - fun noEscapeAllArgsLiteralsMatchConsecutiveUnderscores() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K__o' """, - """ + fun noEscapeAllArgsLiteralsMatchConsecutiveUnderscores() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K__o' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatch2UnderscoresNonConsecutive() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE '_u_o' """, - """ + fun noEscapeAllArgsLiteralsMatch2UnderscoresNonConsecutive() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE '_u_o' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatchUnderscoresAtEnd() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Kum_' """, - """ + fun noEscapeAllArgsLiteralsMatchUnderscoresAtEnd() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Kum_' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatchPercentage() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Ku%o' """, - """ + fun noEscapeAllArgsLiteralsMatchPercentage() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Ku%o' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsNoMatchPercentageExtraCharBefore() = assertEval("""SELECT * FROM animals as a WHERE 'KKumo' LIKE 'Ku%o' """, - """ + fun noEscapeAllArgsLiteralsNoMatchPercentageExtraCharBefore() = assertEval( + """SELECT * FROM animals as a WHERE 'KKumo' LIKE 'Ku%o' """, + """ [] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsNoMatchPercentageExtraCharAfter() = assertEval("""SELECT * FROM animals as a WHERE 'Kumol' LIKE 'Ku%o' """, - """ + fun noEscapeAllArgsLiteralsNoMatchPercentageExtraCharAfter() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumol' LIKE 'Ku%o' """, + """ [] """, - animals) - + animals + ) @Test - fun noEscapeAllArgsLiteralsMatch2PercentagesConsecutive() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K%%o' """, - """ + fun noEscapeAllArgsLiteralsMatch2PercentagesConsecutive() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K%%o' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatch2PercentagesNonConsecutive() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K%m%' """, - """ + fun noEscapeAllArgsLiteralsMatch2PercentagesNonConsecutive() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K%m%' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatchPercentageAsFirst() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE '%umo' """, - """ + fun noEscapeAllArgsLiteralsMatchPercentageAsFirst() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE '%umo' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsMatchPercentageAsLast() = 
assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Kum%' """, - """ + fun noEscapeAllArgsLiteralsMatchPercentageAsLast() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'Kum%' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) - + animals + ) @Test - fun noEscapeAllArgsLiteralsPercentageAndUnderscore() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K_%mo' """, - """ + fun noEscapeAllArgsLiteralsPercentageAndUnderscore() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K_%mo' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsPercentageAndUnderscoreNonConsecutive() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K_m%' """, - """ + fun noEscapeAllArgsLiteralsPercentageAndUnderscoreNonConsecutive() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE 'K_m%' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsAllUnderscores() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE '____' """, - """ + fun noEscapeAllArgsLiteralsAllUnderscores() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE '____' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsJustPercentage() = assertEval("""SELECT * FROM animals as a WHERE 'Kumo' LIKE '%' """, - """ + fun noEscapeAllArgsLiteralsJustPercentage() = assertEval( + """SELECT * FROM animals as a WHERE 'Kumo' LIKE '%' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeAllArgsLiteralsEmptyStringAndJustPercentage() = assertEval("""SELECT * FROM animals as a WHERE '' LIKE '%' """, - """ + fun noEscapeAllArgsLiteralsEmptyStringAndJustPercentage() = assertEval( + """SELECT * FROM animals as a WHERE '' LIKE '%' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun EscapePercentageAllArgsLiterals() = assertEval("""SELECT * FROM animals as a WHERE '%' LIKE '[%' ESCAPE '[' """, - """ + fun EscapePercentageAllArgsLiterals() = assertEval( + """SELECT * FROM animals as a WHERE '%' LIKE '[%' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) - + animals + ) @Test - fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentage() = assertEval("""SELECT * FROM animals as a WHERE '100%' LIKE '1%[%' ESCAPE '[' """, - """ + fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentage() = assertEval( + """SELECT * FROM animals as a WHERE '100%' LIKE '1%[%' ESCAPE '[' """, + """ [ {name:"Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun EscapePercentageWithBackSlashAllArgsLiteralsPatternWithMetaPercentage() = assertEval("""SELECT * FROM animals as a WHERE '100%' LIKE '1%\%' ESCAPE '\' """, - """ + fun EscapePercentageWithBackSlashAllArgsLiteralsPatternWithMetaPercentage() = assertEval( + """SELECT * FROM animals as a WHERE '100%' LIKE '1%\%' ESCAPE '\' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, 
{name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun EscapePercentageAllArgsLiteralsPatternWithMetaUnderscore() = assertEval("""SELECT * FROM animals as a WHERE '100%' LIKE '1__[%' ESCAPE '[' """, - """ + fun EscapePercentageAllArgsLiteralsPatternWithMetaUnderscore() = assertEval( + """SELECT * FROM animals as a WHERE '100%' LIKE '1__[%' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentAtStart() = assertEval("""SELECT * FROM animals as a WHERE '%100' LIKE '[%%' ESCAPE '[' """, - """ + fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentAtStart() = assertEval( + """SELECT * FROM animals as a WHERE '%100' LIKE '[%%' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentAtStartFollowedByUnderscore() = assertEval("""SELECT * FROM animals as a WHERE '%100' LIKE '[%_00' ESCAPE '[' """, - """ + fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentAtStartFollowedByUnderscore() = assertEval( + """SELECT * FROM animals as a WHERE '%100' LIKE '[%_00' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentAtStartFollowedByUnderscoreNoMatch() = assertEval("""SELECT * FROM animals as a WHERE '%1XX' LIKE '[%_00' ESCAPE '[' """, - """ + fun EscapePercentageAllArgsLiteralsPatternWithMetaPercentAtStartFollowedByUnderscoreNoMatch() = assertEval( + """SELECT * FROM animals as a WHERE '%1XX' LIKE '[%_00' ESCAPE '[' """, + """ [] """, - animals) + animals + ) @Test - fun MultipleEscapesNoMeta() = assertEval("""SELECT * FROM animals as a WHERE '1_000_000%' LIKE '1[_000[_000[%' ESCAPE '[' """, - """ + fun MultipleEscapesNoMeta() = assertEval( + """SELECT * FROM animals as a WHERE '1_000_000%' LIKE '1[_000[_000[%' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun MultipleEscapesWithMeta() = assertEval("""SELECT * FROM animals as a WHERE '1_000_000%' LIKE '1[____[_%[%' ESCAPE '[' """, - """ + fun MultipleEscapesWithMeta() = assertEval( + """SELECT * FROM animals as a WHERE '1_000_000%' LIKE '1[____[_%[%' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun MultipleEscapesWithMetaAtStart() = assertEval("""SELECT * FROM animals as a WHERE '1_000_000%' LIKE '_[_%[_%[%' ESCAPE '[' """, - """ + fun MultipleEscapesWithMetaAtStart() = assertEval( + """SELECT * FROM animals as a WHERE '1_000_000%' LIKE '_[_%[_%[%' ESCAPE '[' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeValueIsBinding() = assertEval("""SELECT * FROM animals as a WHERE a.name LIKE 'Kumo' """, """ + fun noEscapeValueIsBinding() = assertEval( + """SELECT * FROM animals as a WHERE a.name LIKE 'Kumo' """, + """ [ {name: "Kumo", type: "dog"} ] - """, animals) + """, + animals + ) @Test - fun noEscapeValueIsStringAppendExpression() = assertEval("""SELECT * FROM animals as a WHERE a.name || 'xx' LIKE '%xx' """, - """ + fun noEscapeValueIsStringAppendExpression() 
= assertEval( + """SELECT * FROM animals as a WHERE a.name || 'xx' LIKE '%xx' """, + """ [ {name: "Kumo", type: "dog"}, {name:"Mochi",type:"dog"}, {name:"Lilikoi",type:"unicorn"} ] """, - animals) + animals + ) @Test - fun noEscapeValueAndPatternAreBindings() = assertEval("""SELECT a.name FROM + fun noEscapeValueAndPatternAreBindings() = assertEval( + """SELECT a.name FROM `[ { name:"Abcd", pattern:"A___" }, { name:"100", pattern:"1%0" } ]` as a - WHERE a.name LIKE a.pattern """, """ + WHERE a.name LIKE a.pattern """, + """ [ { name:"Abcd" }, { name:"100"} ] - """) + """ + ) @Test - fun EscapeLiteralValueAndPatternAreBindings() = assertEval("""SELECT a.name FROM + fun EscapeLiteralValueAndPatternAreBindings() = assertEval( + """SELECT a.name FROM `[ { name:"Abcd", pattern:"A___" }, { name:"100%", pattern:"1%0\\%" } ]` as a - WHERE a.name LIKE a.pattern ESCAPE '\' """, """ + WHERE a.name LIKE a.pattern ESCAPE '\' """, + """ [ { name:"Abcd" }, { name:"100%"} ] - """) + """ + ) @Test - fun EscapeValueAndPatternAreBindings() = assertEval("""SELECT a.name FROM + fun EscapeValueAndPatternAreBindings() = assertEval( + """SELECT a.name FROM `[ { name:"Abcd", pattern:"A___" , escapeChar:'['}, { name:"100%", pattern:"1%0[%", escapeChar: '['} ]` as a - WHERE a.name LIKE a.pattern ESCAPE a.escapeChar """, """ + WHERE a.name LIKE a.pattern ESCAPE a.escapeChar """, + """ [ { name:"Abcd" }, { name:"100%"} ] - """) + """ + ) @Test - fun NotLikeEscapeValueAndPatternAreBindings() = assertEval("""SELECT a.name FROM + fun NotLikeEscapeValueAndPatternAreBindings() = assertEval( + """SELECT a.name FROM `[ { name:"Abcd", pattern:"A__" , escapeChar:'['}, { name:"1000%", pattern:"1_0[%", escapeChar: '['} ]` as a - WHERE a.name NOT LIKE a.pattern ESCAPE a.escapeChar """, """ + WHERE a.name NOT LIKE a.pattern ESCAPE a.escapeChar """, + """ [ { name:"Abcd" }, { name:"1000%"} ] - """) + """ + ) @Test fun emptyStringAsEscape() = assertThrows( "SELECT * FROM <<>> AS a WHERE '%' LIKE '%' ESCAPE ''", "Cannot use empty character as ESCAPE character in a LIKE predicate: \"\"", - NodeMetadata(1, 51)) + NodeMetadata(1, 51) + ) @Test fun moreThanOneCharacterEscape() = assertThrows( "SELECT * FROM <<>> AS a WHERE '%' LIKE '%' ESCAPE '[]'", "Escape character must have size 1 : []", - NodeMetadata(1, 51)) + NodeMetadata(1, 51) + ) @Test fun escapeByItself() = assertThrows( "SELECT * FROM <<>> AS a WHERE 'aaaaa' LIKE '[' ESCAPE '['", "Invalid escape sequence : [", - NodeMetadata(1, 44)) + NodeMetadata(1, 44) + ) @Test fun escapeWithoutWildcard() = assertThrows( "SELECT * FROM <<>> AS a WHERE 'aaaaa' LIKE '[a' ESCAPE '['", "Invalid escape sequence : [a", - NodeMetadata(1, 44)) + NodeMetadata(1, 44) + ) @Test fun valueNotAString() = assertThrows( "SELECT * FROM <<>> AS a WHERE 1 LIKE 'a' ESCAPE '['", "LIKE expression must be given non-null strings as input", - NodeMetadata(1, 33)) + NodeMetadata(1, 33) + ) @Test fun patternNotAString() = assertThrows( "SELECT * FROM <<>> AS a WHERE 'a' LIKE 1 ESCAPE '['", "LIKE expression must be given non-null strings as input", - NodeMetadata(1, 35)) + NodeMetadata(1, 35) + ) @Test fun escapeNotAString() = assertThrows( // column is marked at the position of LIKE "SELECT * FROM <<>> AS a WHERE 'a' LIKE 'a' ESCAPE 1", "LIKE expression must be given non-null strings as input", - NodeMetadata(1, 35)) + NodeMetadata(1, 35) + ) @Test fun valueIsNull() = assertEval("SELECT * FROM <<>> AS a WHERE null LIKE 'a' ESCAPE '['", "[]") @@ -582,55 +688,79 @@ class LikePredicateTest : EvaluatorTestBase() { fun 
escapeIsNull() = assertEval("SELECT * FROM <<>> AS a WHERE 'a' LIKE 'a' ESCAPE null", "[]") @Test - fun nonLiteralsMissingValue() = assertEval("""SELECT * FROM animals as a WHERE a.xxx LIKE '%' """, """ + fun nonLiteralsMissingValue() = assertEval( + """SELECT * FROM animals as a WHERE a.xxx LIKE '%' """, + """ [] - """, animals) + """, + animals + ) @Test - fun nonLiteralsMissingPattern() = assertEval("""SELECT * FROM animals as a WHERE a.name LIKE a.xxx """, """ + fun nonLiteralsMissingPattern() = assertEval( + """SELECT * FROM animals as a WHERE a.name LIKE a.xxx """, + """ [] - """, animals) + """, + animals + ) @Test - fun nonLiteralsMissingEscape() = assertEval("""SELECT * FROM animals as a WHERE a.name LIKE '%' ESCAPE a.xxx""", """ + fun nonLiteralsMissingEscape() = assertEval( + """SELECT * FROM animals as a WHERE a.name LIKE '%' ESCAPE a.xxx""", + """ [] - """, animals) + """, + animals + ) @Test - fun nonLiteralsNullValue() = assertEval("""SELECT * FROM animalsWithNulls as a WHERE a.name LIKE '%' """, """ + fun nonLiteralsNullValue() = assertEval( + """SELECT * FROM animalsWithNulls as a WHERE a.name LIKE '%' """, + """ [] - """, animalsWithNulls) + """, + animalsWithNulls + ) @Test - fun nonLiteralsNullPattern() = assertEval("""SELECT * FROM animalsWithNulls as a WHERE a.type LIKE a.name """, """ + fun nonLiteralsNullPattern() = assertEval( + """SELECT * FROM animalsWithNulls as a WHERE a.type LIKE a.name """, + """ [] - """, animalsWithNulls) + """, + animalsWithNulls + ) @Test - fun nonLiteralsNullEscape() = assertEval("""SELECT * FROM animalsWithNulls as a WHERE a.type LIKE '%' ESCAPE a.name""", - """ + fun nonLiteralsNullEscape() = assertEval( + """SELECT * FROM animalsWithNulls as a WHERE a.type LIKE '%' ESCAPE a.name""", + """ [] """, - animalsWithNulls) - + animalsWithNulls + ) @Test fun nonLiteralsNonStringEscape() = assertThrows( "SELECT * FROM `[{name:1, type:\"a\"}]` as a WHERE a.type LIKE '%' ESCAPE a.name", "LIKE expression must be given non-null strings as input", - NodeMetadata(1, 56)) + NodeMetadata(1, 56) + ) @Test fun nonLiteralsNonStringPattern() = assertThrows( "SELECT * FROM `[{name:1, type:\"a\"}]` as a WHERE a.type LIKE a.name", "LIKE expression must be given non-null strings as input", - NodeMetadata(1, 56)) + NodeMetadata(1, 56) + ) @Test fun nonLiteralsNonStringValue() = assertThrows( "SELECT * FROM `[{name:1, type:\"a\"}]` as a WHERE a.name LIKE a.type ", "LIKE expression must be given non-null strings as input", - NodeMetadata(1, 56)) + NodeMetadata(1, 56) + ) /** Regression test for: https://github.com/partiql/partiql-lang-kotlin/issues/32 */ @Test diff --git a/lang/test/org/partiql/lang/eval/NaturalExprValueComparatorsTest.kt b/lang/test/org/partiql/lang/eval/NaturalExprValueComparatorsTest.kt index e938dccc6e..a8a423f7e2 100644 --- a/lang/test/org/partiql/lang/eval/NaturalExprValueComparatorsTest.kt +++ b/lang/test/org/partiql/lang/eval/NaturalExprValueComparatorsTest.kt @@ -14,12 +14,12 @@ package org.partiql.lang.eval -import org.partiql.lang.SqlException import junitparams.Parameters import org.junit.Test +import org.partiql.lang.SqlException +import org.partiql.lang.errors.ErrorCode import java.util.Collections import java.util.Random -import org.partiql.lang.errors.ErrorCode class NaturalExprValueComparatorsTest : EvaluatorTestBase() { // the lists below represent the expected ordering of values @@ -33,7 +33,7 @@ class NaturalExprValueComparatorsTest : EvaluatorTestBase() { "`null.int`", "`null.struct`" ) - + private val nonNullExpr = listOf( 
listOf( "false", @@ -260,11 +260,11 @@ class NaturalExprValueComparatorsTest : EvaluatorTestBase() { "<<>>" ), listOf( - //The ordered values are: true, true, 1 + // The ordered values are: true, true, 1 "<<1, true, true>>" ), listOf( - //The ordered values are: true, true, 1, 1, 1 + // The ordered values are: true, true, 1, 1, 1 "<>" ), listOf( @@ -291,7 +291,7 @@ class NaturalExprValueComparatorsTest : EvaluatorTestBase() { ) private val basicExprs = listOf(nullExprs) + nonNullExpr - + private fun List>.flatten() = this.flatMap { it } private fun List>.eval() = map { it.map { @@ -305,11 +305,13 @@ class NaturalExprValueComparatorsTest : EvaluatorTestBase() { private val iterations = 1000 - data class CompareCase(val id: Int, - val description: String, - val comparator: Comparator, - val unordered: List, - val expected: List>) { + data class CompareCase( + val id: Int, + val description: String, + val comparator: Comparator, + val unordered: List, + val expected: List> + ) { override fun toString() = "$description.$id" } @@ -325,9 +327,11 @@ class NaturalExprValueComparatorsTest : EvaluatorTestBase() { fun List>.moveHeadToTail(): List> = drop(1).plusElement(this[0]) - fun shuffleCase(description: String, - comparator: Comparator, - expectedSource: List>): CompareCase { + fun shuffleCase( + description: String, + comparator: Comparator, + expectedSource: List> + ): CompareCase { val expected = expectedSource.eval() val unordered = expected.flatShuffle() diff --git a/lang/test/org/partiql/lang/eval/NodeMetadataTest.kt b/lang/test/org/partiql/lang/eval/NodeMetadataTest.kt index bf7d45564b..92049e2408 100644 --- a/lang/test/org/partiql/lang/eval/NodeMetadataTest.kt +++ b/lang/test/org/partiql/lang/eval/NodeMetadataTest.kt @@ -44,27 +44,29 @@ class NodeMetadataTest { } private fun PropertyValue.valueAsAny(): Any = when (this.type) { - PropertyType.LONG_CLASS -> this.longValue() - PropertyType.STRING_CLASS -> this.stringValue() - PropertyType.INTEGER_CLASS -> this.integerValue() - PropertyType.TOKEN_CLASS -> this.tokenTypeValue() + PropertyType.LONG_CLASS -> this.longValue() + PropertyType.STRING_CLASS -> this.stringValue() + PropertyType.INTEGER_CLASS -> this.integerValue() + PropertyType.TOKEN_CLASS -> this.tokenTypeValue() PropertyType.ION_VALUE_CLASS -> this.ionValue() } private fun PropertyValueMap.clone(): PropertyValueMap = this.getProperties().fold(PropertyValueMap()) { acc, property -> when (property.propertyType) { - PropertyType.LONG_CLASS -> acc[property] = this[property]!!.longValue() - PropertyType.STRING_CLASS -> acc[property] = this[property]!!.stringValue() - PropertyType.INTEGER_CLASS -> acc[property] = this[property]!!.integerValue() - PropertyType.TOKEN_CLASS -> acc[property] = this[property]!!.tokenTypeValue() + PropertyType.LONG_CLASS -> acc[property] = this[property]!!.longValue() + PropertyType.STRING_CLASS -> acc[property] = this[property]!!.stringValue() + PropertyType.INTEGER_CLASS -> acc[property] = this[property]!!.integerValue() + PropertyType.TOKEN_CLASS -> acc[property] = this[property]!!.tokenTypeValue() PropertyType.ION_VALUE_CLASS -> acc[property] = this[property]!!.ionValue() } acc } - private fun buildErrorContext(initial: PropertyValueMap? = null, - build: PropertyValueMap.() -> Unit): PropertyValueMap { + private fun buildErrorContext( + initial: PropertyValueMap? 
= null, + build: PropertyValueMap.() -> Unit + ): PropertyValueMap { val p = initial?.clone() ?: PropertyValueMap() p.apply(build) return p @@ -96,12 +98,14 @@ class NodeMetadataTest { * contain either line, column number or both */ private fun parametersForFillErrorContextNotAddingMetadata(): List { - return listOf(buildErrorContext { this[Property.COLUMN_NUMBER] = 1L }, - buildErrorContext { this[Property.LINE_NUMBER] = 2L }, - buildErrorContext { - this[Property.LINE_NUMBER] = 3L - this[Property.COLUMN_NUMBER] = 4L - }) + return listOf( + buildErrorContext { this[Property.COLUMN_NUMBER] = 1L }, + buildErrorContext { this[Property.LINE_NUMBER] = 2L }, + buildErrorContext { + this[Property.LINE_NUMBER] = 3L + this[Property.COLUMN_NUMBER] = 4L + } + ) } @Test @@ -117,4 +121,4 @@ class NodeMetadataTest { assertTrue(actual === errorContext) assertEqualsErrorContext(expected, actual) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/NullIfEvaluationTest.kt b/lang/test/org/partiql/lang/eval/NullIfEvaluationTest.kt index 5ab38f72ef..f225268f11 100644 --- a/lang/test/org/partiql/lang/eval/NullIfEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/NullIfEvaluationTest.kt @@ -22,14 +22,15 @@ class NullIfEvaluationTest : EvaluatorTestBase() { data class NullIfTestCase( val expr1: String, val expr2: String, - val expected: String) + val expected: String + ) @ParameterizedTest @MethodSource("nullifEvaluationTests") fun runTests(tc: NullIfTestCase) = assertEvalExprValue("nullif(${tc.expr1}, ${tc.expr2})", tc.expected) companion object { - fun testCase(expr1: String,expr2: String, expected: String) = NullIfTestCase(expr1, expr2, expected) + fun testCase(expr1: String, expr2: String, expected: String) = NullIfTestCase(expr1, expr2, expected) @JvmStatic @Suppress("unused") @@ -54,4 +55,4 @@ class NullIfEvaluationTest : EvaluatorTestBase() { testCase("1", "'a'", "1") ) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/QuotedIdentifierTests.kt b/lang/test/org/partiql/lang/eval/QuotedIdentifierTests.kt index 4a6fc4f06a..d86694ed3b 100644 --- a/lang/test/org/partiql/lang/eval/QuotedIdentifierTests.kt +++ b/lang/test/org/partiql/lang/eval/QuotedIdentifierTests.kt @@ -55,7 +55,7 @@ class QuotedIdentifierTests : EvaluatorTestBase() { assertEvalIsMissing("\"abc\"", simpleSession, undefinedVariableMissingCompileOptions) assertEvalIsMissing("\"ABC\"", simpleSession, undefinedVariableMissingCompileOptions) - //Ensure case sensitive lookup still works. + // Ensure case sensitive lookup still works. 
assertEval("\"Abc\"", "1", simpleSession, undefinedVariableMissingCompileOptions) } @@ -66,14 +66,16 @@ class QuotedIdentifierTests : EvaluatorTestBase() { simpleSession, ErrorCode.EVALUATOR_QUOTED_BINDING_DOES_NOT_EXIST, sourceLocationProperties(1L, 1L) + mapOf(Property.BINDING_NAME to "abc"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) checkInputThrowingEvaluationException( "\"ABC\"", simpleSession, ErrorCode.EVALUATOR_QUOTED_BINDING_DOES_NOT_EXIST, sourceLocationProperties(1L, 1L) + mapOf(Property.BINDING_NAME to "ABC"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) } @Test @@ -84,8 +86,10 @@ class QuotedIdentifierTests : EvaluatorTestBase() { ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, sourceLocationProperties(1L, 1L) + mapOf( Property.BINDING_NAME to "abc", - Property.BINDING_NAME_MATCHES to "Abc, aBc, abC"), - expectedPermissiveModeResult = "MISSING") + Property.BINDING_NAME_MATCHES to "Abc, aBc, abC" + ), + expectedPermissiveModeResult = "MISSING" + ) } @Test @@ -100,21 +104,25 @@ class QuotedIdentifierTests : EvaluatorTestBase() { assertEval( "SELECT \"Abc\".n AS a, \"aBc\".n AS b, \"abC\".n AS c FROM a as Abc, b as aBc, c as abC", "[{a:1, b:2, c:3}]", - simpleSessionWithTables) + simpleSessionWithTables + ) @Test fun quotedTableAliasesAreCaseSensitive() = assertEval( "SELECT \"Abc\".n AS a, \"aBc\".n AS b, \"abC\".n AS c FROM a as \"Abc\", b as \"aBc\", c as \"abC\"", "[{a:1, b:2, c:3}]", - simpleSessionWithTables) + simpleSessionWithTables + ) val tableWithCaseVaryingFields = "[{ Abc: 1, aBc: 2, abC: 3}]" @Test fun quotedStructFieldsAreCaseSensitive() = - assertEval("SELECT s.\"Abc\" , s.\"aBc\", s.\"abC\" FROM `$tableWithCaseVaryingFields` AS s", - tableWithCaseVaryingFields) + assertEval( + "SELECT s.\"Abc\" , s.\"aBc\", s.\"abC\" FROM `$tableWithCaseVaryingFields` AS s", + tableWithCaseVaryingFields + ) @Test fun unquotedStructFieldsAreAmbiguous() { @@ -124,12 +132,13 @@ class QuotedIdentifierTests : EvaluatorTestBase() { ErrorCode.EVALUATOR_AMBIGUOUS_BINDING, sourceLocationProperties(1L, 10L) + mapOf( Property.BINDING_NAME to "abc", - Property.BINDING_NAME_MATCHES to "Abc, aBc, abC"), + Property.BINDING_NAME_MATCHES to "Abc, aBc, abC" + ), expectedPermissiveModeResult = "<<{}>>" ) } - //////////////////////////////////////////// + // ////////////////////////////////////////// private val nestedStructsLowercase = mapOf("a" to "{b:{c:{d:{e:5,f:6}}}}") private val globalHello = mapOf("s" to "\"hello\"") @@ -170,8 +179,6 @@ class QuotedIdentifierTests : EvaluatorTestBase() { "Stores" to "1" ) - - private val friends = mapOf( "friends" to """ { @@ -221,7 +228,6 @@ class QuotedIdentifierTests : EvaluatorTestBase() { fun pathDotMissingAttribute_Inverted() = assertEval(""" "a".z IS MISSING """, "true", nestedStructsLowercase.toSession()) - @Test fun pathIndexing_quotedId() = assertEval(""" "stores"[0]."books"[2]."title" """, "\"C\"", stores.toSession()) @@ -248,7 +254,6 @@ class QuotedIdentifierTests : EvaluatorTestBase() { friends.toSession() ) - @Test fun pathWildCardOverScalar_quotedId() = assertEval( """ "s"[*] """, @@ -262,4 +267,4 @@ class QuotedIdentifierTests : EvaluatorTestBase() { """["hello"]""", globalHello.toSession() ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/SimpleEvaluatingCompilerTests.kt b/lang/test/org/partiql/lang/eval/SimpleEvaluatingCompilerTests.kt index 8c284f84e1..449bd5330f 100644 --- 
a/lang/test/org/partiql/lang/eval/SimpleEvaluatingCompilerTests.kt +++ b/lang/test/org/partiql/lang/eval/SimpleEvaluatingCompilerTests.kt @@ -25,7 +25,6 @@ class SimpleEvaluatingCompilerTests : EvaluatorTestBase() { "listOfInts" to "[1, 2, 3, 4, 5]" ).toSession() - @Test fun selectValue() { assertEval("SELECT VALUE someScalar FROM someScalar", "[1]", session) @@ -45,7 +44,8 @@ class SimpleEvaluatingCompilerTests : EvaluatorTestBase() { fun selectStarWhere() { assertEval( "SELECT * FROM `[{a: 100, b: 1000}, {a: 101, b: 1001}]` WHERE a > 100", - "[{a: 101, b: 1001}]") + "[{a: 101, b: 1001}]" + ) } @Test @@ -108,7 +108,8 @@ class SimpleEvaluatingCompilerTests : EvaluatorTestBase() { "CAST(`'a'` as INT)", ErrorCode.EVALUATOR_CAST_FAILED, sourceLocationProperties(1, 1) + mapOf(Property.CAST_FROM to "SYMBOL", Property.CAST_TO to "INT"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) @Test fun sum() { diff --git a/lang/test/org/partiql/lang/eval/ThunkFactoryTests.kt b/lang/test/org/partiql/lang/eval/ThunkFactoryTests.kt index 29af269cb8..c288dda71b 100644 --- a/lang/test/org/partiql/lang/eval/ThunkFactoryTests.kt +++ b/lang/test/org/partiql/lang/eval/ThunkFactoryTests.kt @@ -67,7 +67,8 @@ class ThunkFactoryTests { createTestCases( StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(20))), STRING_LONG, - true) + true + ) ).flatten() } @@ -157,17 +158,17 @@ class ThunkFactoryTests { }.invoke(Environment.standard()) } - private fun assertInvoke(expectException: Boolean, block: () -> Unit) { - if(expectException) { + if (expectException) { val ex = assertThrows { block() } - assertEquals(ErrorCode.EVALUATOR_VALUE_NOT_INSTANCE_OF_EXPECTED_TYPE, ex.errorCode, - "Message was: ${ex.message}") + assertEquals( + ErrorCode.EVALUATOR_VALUE_NOT_INSTANCE_OF_EXPECTED_TYPE, ex.errorCode, + "Message was: ${ex.message}" + ) } else { block() } } } - diff --git a/lang/test/org/partiql/lang/eval/TypingModeTests.kt b/lang/test/org/partiql/lang/eval/TypingModeTests.kt index 9e8c07170f..d2fa62c879 100644 --- a/lang/test/org/partiql/lang/eval/TypingModeTests.kt +++ b/lang/test/org/partiql/lang/eval/TypingModeTests.kt @@ -62,20 +62,20 @@ class TypingModeTests : EvaluatorTestBase() { try { eval(tc.sql, compileOptions = legacyCompileOptions).ionValue fail("Expected EvaluationException but none was thrown") - } catch(ex: EvaluationException) { + } catch (ex: EvaluationException) { assertEquals("error code", tc.expectedLegacyError.errorCode, ex.errorCode) assertEquals( "line number", tc.expectedLegacyError.lineNum.toLong(), - ex.errorContext?.get(Property.LINE_NUMBER)?.longValue()) + ex.errorContext?.get(Property.LINE_NUMBER)?.longValue() + ) assertEquals( "column number", tc.expectedLegacyError.charOffset.toLong(), - ex.errorContext?.get(Property.COLUMN_NUMBER)?.longValue()) + ex.errorContext?.get(Property.COLUMN_NUMBER)?.longValue() + ) } assertEvalExprValue(tc.sql, tc.expectedPermissiveModeResult, compileOptions = permissiveCompileOptions) } - - } diff --git a/lang/test/org/partiql/lang/eval/builtins/InvalidArgTypeChecker.kt b/lang/test/org/partiql/lang/eval/builtins/InvalidArgTypeChecker.kt index 521ced1997..7746423338 100644 --- a/lang/test/org/partiql/lang/eval/builtins/InvalidArgTypeChecker.kt +++ b/lang/test/org/partiql/lang/eval/builtins/InvalidArgTypeChecker.kt @@ -4,30 +4,27 @@ import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property import org.partiql.lang.eval.EvaluatorTestBase import 
org.partiql.lang.eval.expectedArgTypeErrorMsg -import org.partiql.lang.types.StaticType -import org.partiql.lang.types.SingleType +import org.partiql.lang.types.BagType +import org.partiql.lang.types.BlobType import org.partiql.lang.types.BoolType -import org.partiql.lang.types.IntType -import org.partiql.lang.types.FloatType -import org.partiql.lang.types.DecimalType -import org.partiql.lang.types.DateType -import org.partiql.lang.types.TimestampType -import org.partiql.lang.types.TimeType -import org.partiql.lang.types.SymbolType -import org.partiql.lang.types.StringType import org.partiql.lang.types.ClobType -import org.partiql.lang.types.BlobType +import org.partiql.lang.types.DateType +import org.partiql.lang.types.DecimalType +import org.partiql.lang.types.FloatType +import org.partiql.lang.types.IntType import org.partiql.lang.types.ListType -import org.partiql.lang.types.SexpType -import org.partiql.lang.types.StructType -import org.partiql.lang.types.BagType import org.partiql.lang.types.MissingType import org.partiql.lang.types.NullType - - +import org.partiql.lang.types.SexpType +import org.partiql.lang.types.SingleType +import org.partiql.lang.types.StaticType +import org.partiql.lang.types.StringType +import org.partiql.lang.types.StructType +import org.partiql.lang.types.SymbolType +import org.partiql.lang.types.TimeType +import org.partiql.lang.types.TimestampType import java.lang.StringBuilder - /** * This class is used to represent each argument in ExprFunctions for argument type checking. * @param argPosition is the position of argument in an ExprFunction @@ -135,4 +132,4 @@ class InvalidArgTypeChecker : EvaluatorTestBase() { ), expectedPermissiveModeResult = "MISSING" ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/TimestampExtensions.kt b/lang/test/org/partiql/lang/eval/builtins/TimestampExtensions.kt index dbb8c66f65..e8b53e3866 100644 --- a/lang/test/org/partiql/lang/eval/builtins/TimestampExtensions.kt +++ b/lang/test/org/partiql/lang/eval/builtins/TimestampExtensions.kt @@ -6,31 +6,34 @@ import java.time.LocalDate import java.time.OffsetDateTime import java.time.temporal.TemporalAdjusters - internal fun Timestamp.toTemporalAccessor() = TimestampTemporalAccessor(this) internal fun Timestamp.toOffsetDateTime(): OffsetDateTime = - java.time.OffsetDateTime.of( - this.year, - this.month, - this.day, - this.hour, - this.minute, - this.second, - this.decimalSecond.rem(java.math.BigDecimal.valueOf(1L)).multiply(java.math.BigDecimal.valueOf(1000000000)).toInt(), - java.time.ZoneOffset.ofTotalSeconds(this.localOffset * 60)) + java.time.OffsetDateTime.of( + this.year, + this.month, + this.day, + this.hour, + this.minute, + this.second, + this.decimalSecond.rem(java.math.BigDecimal.valueOf(1L)).multiply(java.math.BigDecimal.valueOf(1000000000)).toInt(), + java.time.ZoneOffset.ofTotalSeconds(this.localOffset * 60) + ) internal fun OffsetDateTime.toTimestamp(): com.amazon.ion.Timestamp = - Timestamp.forSecond(this.year, this.month.value, this.dayOfMonth, this.hour, this.minute, - BigDecimal.valueOf(this.second.toLong()).plus( - BigDecimal.valueOf(this.nano.toLong()).divide(BigDecimal.valueOf(1000000000))), - this.offset.totalSeconds / 60) + Timestamp.forSecond( + this.year, this.month.value, this.dayOfMonth, this.hour, this.minute, + BigDecimal.valueOf(this.second.toLong()).plus( + BigDecimal.valueOf(this.nano.toLong()).divide(BigDecimal.valueOf(1000000000)) + ), + this.offset.totalSeconds / 60 + ) internal fun 
java.util.Random.nextTimestamp(): Timestamp { val year = Math.abs(this.nextInt() % 9999) + 1 val month = Math.abs(this.nextInt() % 12) + 1 - //Determine last day of month for randomly generated month & year (e.g. 28, 29, 30 or 31) + // Determine last day of month for randomly generated month & year (e.g. 28, 29, 30 or 31) val maxDayOfMonth = LocalDate.of(year, month, 1).with(TemporalAdjusters.lastDayOfMonth()).dayOfMonth val day = Math.abs(this.nextInt() % maxDayOfMonth) + 1 @@ -39,17 +42,17 @@ internal fun java.util.Random.nextTimestamp(): Timestamp { val secondFraction = BigDecimal.valueOf(Math.abs(this.nextLong()) % 1000000000).div(BigDecimal.valueOf(1000000000L)) val seconds = BigDecimal.valueOf(Math.abs(this.nextInt() % 59L)).add(secondFraction).abs() - //Note: need to % 59L above because 59L + secondFraction can yield 60 seconds + // Note: need to % 59L above because 59L + secondFraction can yield 60 seconds var offsetMinutes = this.nextInt() % (18 * 60) - //If the offset pushes this timestamp before 1/1/0001 then we will get IllegalArgumentException from - //Timestamp.forSecond - //NOTE: the offset is *substracted* from the specified time! - if(year == 1 && month == 1 && day == 1 && hour <= 18) { + // If the offset pushes this timestamp before 1/1/0001 then we will get IllegalArgumentException from + // Timestamp.forSecond + // NOTE: the offset is *substracted* from the specified time! + if (year == 1 && month == 1 && day == 1 && hour <= 18) { offsetMinutes = -Math.abs(offsetMinutes) } - //Same if the offset can push this time stamp after 12/31/9999 + // Same if the offset can push this time stamp after 12/31/9999 else if (year == 9999 && month == 12 && day == 31 && hour >= 6) { offsetMinutes = Math.abs(offsetMinutes) } diff --git a/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt b/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt index 85a6e9dc42..7171ba4861 100644 --- a/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/TimestampParserTest.kt @@ -10,7 +10,10 @@ import org.partiql.lang.errors.ErrorCode import org.partiql.lang.eval.EvaluationException import java.lang.reflect.Type import java.time.format.DateTimeParseException -import kotlin.test.* +import kotlin.test.assertEquals +import kotlin.test.assertNotNull +import kotlin.test.assertNull +import kotlin.test.fail @RunWith(JUnitParamsRunner::class) class TimestampParserTest { diff --git a/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt b/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt index 783a86944e..6882ed298f 100644 --- a/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt +++ b/lang/test/org/partiql/lang/eval/builtins/TimestampTemporalAccessorTests.kt @@ -10,7 +10,7 @@ import org.junit.runner.RunWith import java.time.DateTimeException import java.time.format.DateTimeFormatter import java.time.temporal.UnsupportedTemporalTypeException -import java.util.* +import java.util.Random import kotlin.test.assertEquals import kotlin.test.assertNull @@ -21,7 +21,7 @@ class TimestampTemporalAccessorTests { fun createRng(): Random { val rng = Random() val seed = rng.nextLong() - System.out.println("Randomly generated seed is $seed. Use this to reproduce failures in dev environment.") + println("Randomly generated seed is $seed. 
Use this to reproduce failures in dev environment.") rng.setSeed(seed) return rng } diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/CharLengthEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/CharLengthEvaluationTest.kt index ad5ddca77f..234a31d1af 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/CharLengthEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/CharLengthEvaluationTest.kt @@ -8,7 +8,6 @@ import org.partiql.lang.eval.builtins.Argument import org.partiql.lang.eval.builtins.ExprFunctionTestCase import org.partiql.lang.eval.builtins.checkInvalidArgType import org.partiql.lang.eval.builtins.checkInvalidArity - import org.partiql.lang.types.StaticType import org.partiql.lang.util.ArgumentsProviderBase @@ -32,7 +31,7 @@ class CharLengthEvaluationTest : EvaluatorTestBase() { ExprFunctionTestCase("char_length('ȴȵ💩💋')", "4"), ExprFunctionTestCase("char_length('😁😞😸😸')", "4"), ExprFunctionTestCase("char_length('話家身圧費谷料村能計税金')", "12"), - ExprFunctionTestCase("char_length('eࠫ')", "2"), //This is a unicode "combining character" which is actually 2 codepoints + ExprFunctionTestCase("char_length('eࠫ')", "2"), // This is a unicode "combining character" which is actually 2 codepoints ) } diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/CharacterLengthEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/CharacterLengthEvaluationTest.kt index d50b0aa1ec..f794b188ba 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/CharacterLengthEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/CharacterLengthEvaluationTest.kt @@ -32,7 +32,7 @@ class CharacterLengthEvaluationTest : EvaluatorTestBase() { ExprFunctionTestCase("character_length('ȴȵ💩💋')", "4"), ExprFunctionTestCase("character_length('😁😞😸😸')", "4"), ExprFunctionTestCase("character_length('話家身圧費谷料村能計税金')", "12"), - ExprFunctionTestCase("character_length('eࠫ')", "2") //This is a unicode "combining character" which is actually 2 codepoints + ExprFunctionTestCase("character_length('eࠫ')", "2") // This is a unicode "combining character" which is actually 2 codepoints ) } diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/ConcatEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/ConcatEvaluationTest.kt index 6c110b9916..cf246a1eea 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/ConcatEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/ConcatEvaluationTest.kt @@ -23,52 +23,52 @@ class ConcatEvaluationTest : EvaluatorTestBase() { // 4. 
null or missing // 1st arg: String - ExprFunctionTestCase("'a' || 'b'", "\"ab\""), // 2nd arg: String - ExprFunctionTestCase("'a' || `\"b\"`", "\"ab\""), // 2nd arg: Ion String - ExprFunctionTestCase("'a' || `b`", "\"ab\""), // 2nd arg: Ion symbol `` - ExprFunctionTestCase("'a' || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` - ExprFunctionTestCase("'a' || null", "null"), // 2nd arg: null - ExprFunctionTestCase("'a' || missing", "null"), // 2nd arg: missing + ExprFunctionTestCase("'a' || 'b'", "\"ab\""), // 2nd arg: String + ExprFunctionTestCase("'a' || `\"b\"`", "\"ab\""), // 2nd arg: Ion String + ExprFunctionTestCase("'a' || `b`", "\"ab\""), // 2nd arg: Ion symbol `` + ExprFunctionTestCase("'a' || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` + ExprFunctionTestCase("'a' || null", "null"), // 2nd arg: null + ExprFunctionTestCase("'a' || missing", "null"), // 2nd arg: missing // 1st arg: Ion String - ExprFunctionTestCase("`\"a\"` || 'b'", "\"ab\""), // 2nd arg: String - ExprFunctionTestCase("`\"a\"` || `\"b\"`", "\"ab\""), // 2nd arg: Ion String - ExprFunctionTestCase("`\"a\"` || `b`", "\"ab\""), // 2nd arg: Ion symbol `` - ExprFunctionTestCase("`\"a\"` || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` - ExprFunctionTestCase("`\"a\"` || null", "null"), // 2nd arg: null - ExprFunctionTestCase("`\"a\"` || missing", "null"), // 2nd arg: missing + ExprFunctionTestCase("`\"a\"` || 'b'", "\"ab\""), // 2nd arg: String + ExprFunctionTestCase("`\"a\"` || `\"b\"`", "\"ab\""), // 2nd arg: Ion String + ExprFunctionTestCase("`\"a\"` || `b`", "\"ab\""), // 2nd arg: Ion symbol `` + ExprFunctionTestCase("`\"a\"` || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` + ExprFunctionTestCase("`\"a\"` || null", "null"), // 2nd arg: null + ExprFunctionTestCase("`\"a\"` || missing", "null"), // 2nd arg: missing // 1st arg: Ion symbol (``) - ExprFunctionTestCase("`a` || 'b'", "\"ab\""), // 2nd arg: String - ExprFunctionTestCase("`a` || `\"b\"`", "\"ab\""), // 2nd arg: Ion String - ExprFunctionTestCase("`a` || `b`", "\"ab\""), // 2nd arg: Ion symbol `` - ExprFunctionTestCase("`a` || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` - ExprFunctionTestCase("`a` || null", "null"), // 2nd arg: null - ExprFunctionTestCase("`a` || missing", "null"), // 2nd arg: missing + ExprFunctionTestCase("`a` || 'b'", "\"ab\""), // 2nd arg: String + ExprFunctionTestCase("`a` || `\"b\"`", "\"ab\""), // 2nd arg: Ion String + ExprFunctionTestCase("`a` || `b`", "\"ab\""), // 2nd arg: Ion symbol `` + ExprFunctionTestCase("`a` || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` + ExprFunctionTestCase("`a` || null", "null"), // 2nd arg: null + ExprFunctionTestCase("`a` || missing", "null"), // 2nd arg: missing // 1st arg: Ion symbol (``) - ExprFunctionTestCase("`'a'` || 'b'", "\"ab\""), // 2nd arg: String - ExprFunctionTestCase("`'a'` || `\"b\"`", "\"ab\""), // 2nd arg: Ion String - ExprFunctionTestCase("`'a'` || `b`", "\"ab\""), // 2nd arg: Ion symbol `` - ExprFunctionTestCase("`'a'` || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` - ExprFunctionTestCase("`'a'` || null", "null"), // 2nd arg: null - ExprFunctionTestCase("`'a'` || missing", "null"), // 2nd arg: missing + ExprFunctionTestCase("`'a'` || 'b'", "\"ab\""), // 2nd arg: String + ExprFunctionTestCase("`'a'` || `\"b\"`", "\"ab\""), // 2nd arg: Ion String + ExprFunctionTestCase("`'a'` || `b`", "\"ab\""), // 2nd arg: Ion symbol `` + ExprFunctionTestCase("`'a'` || `'b'`", "\"ab\""), // 2nd arg: Ion symbol `''` + ExprFunctionTestCase("`'a'` || null", "null"), // 2nd arg: null + 
ExprFunctionTestCase("`'a'` || missing", "null"), // 2nd arg: missing // 1st arg: null - ExprFunctionTestCase("null || 'b'", "null"), // 2nd arg: String - ExprFunctionTestCase("null || `\"b\"`", "null"), // 2nd arg: Ion String - ExprFunctionTestCase("null || `b`", "null"), // 2nd arg: Ion symbol `` - ExprFunctionTestCase("null || `'b'`", "null"), // 2nd arg: Ion symbol `''` - ExprFunctionTestCase("null || null", "null"), // 2nd arg: null - ExprFunctionTestCase("null || missing", "null"), // 2nd arg: missing + ExprFunctionTestCase("null || 'b'", "null"), // 2nd arg: String + ExprFunctionTestCase("null || `\"b\"`", "null"), // 2nd arg: Ion String + ExprFunctionTestCase("null || `b`", "null"), // 2nd arg: Ion symbol `` + ExprFunctionTestCase("null || `'b'`", "null"), // 2nd arg: Ion symbol `''` + ExprFunctionTestCase("null || null", "null"), // 2nd arg: null + ExprFunctionTestCase("null || missing", "null"), // 2nd arg: missing // 1st arg: missing - ExprFunctionTestCase("missing || 'b'", "null"), // 2nd arg: String - ExprFunctionTestCase("missing || `\"b\"`", "null"), // 2nd arg: Ion String - ExprFunctionTestCase("missing || `b`", "null"), // 2nd arg: Ion symbol `` - ExprFunctionTestCase("missing || `'b'`", "null"), // 2nd arg: Ion symbol `''` - ExprFunctionTestCase("missing || null", "null"), // 2nd arg: null - ExprFunctionTestCase("missing || missing", "null"), // 2nd arg: missing + ExprFunctionTestCase("missing || 'b'", "null"), // 2nd arg: String + ExprFunctionTestCase("missing || `\"b\"`", "null"), // 2nd arg: Ion String + ExprFunctionTestCase("missing || `b`", "null"), // 2nd arg: Ion symbol `` + ExprFunctionTestCase("missing || `'b'`", "null"), // 2nd arg: Ion symbol `''` + ExprFunctionTestCase("missing || null", "null"), // 2nd arg: null + ExprFunctionTestCase("missing || missing", "null"), // 2nd arg: missing // Test for more characters in strings ExprFunctionTestCase("'' || 'a'", "\"a\""), diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/DateDiffEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/DateDiffEvaluationTest.kt index 33e7701b67..1d6ccb4d44 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/DateDiffEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/DateDiffEvaluationTest.kt @@ -4,7 +4,6 @@ import org.junit.Test import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.ArgumentsSource import org.partiql.lang.eval.EvaluatorTestBase - import org.partiql.lang.eval.builtins.Argument import org.partiql.lang.eval.builtins.ExprFunctionTestCase import org.partiql.lang.eval.builtins.checkInvalidArgType @@ -228,7 +227,7 @@ class DateDiffEvaluationTest : EvaluatorTestBase() { ExprFunctionTestCase("date_diff(day, `2017-09-01T`, `2017-10-01T`)", "30"), // September ExprFunctionTestCase("date_diff(day, `2017-10-01T`, `2017-11-01T`)", "31"), // October ExprFunctionTestCase("date_diff(day, `2017-11-01T`, `2017-12-01T`)", "30"), // November - ExprFunctionTestCase("date_diff(day, `2017-12-01T`, `2018-01-01T`)", "31") // December + ExprFunctionTestCase("date_diff(day, `2017-12-01T`, `2018-01-01T`)", "31") // December ) } @@ -244,4 +243,4 @@ class DateDiffEvaluationTest : EvaluatorTestBase() { ) // The invalid arity check is considered as syntax error and already done in the ParserErrorsTest.kt -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/ExtractEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/ExtractEvaluationTest.kt 
index bde0b23058..e44f27369b 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/ExtractEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/ExtractEvaluationTest.kt @@ -221,4 +221,4 @@ class ExtractEvaluationTest : EvaluatorTestBase() { ) // The invalid arity check is considered as syntax error and already done in the ParserErrorsTest.kt -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/MakeDateEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/MakeDateEvaluationTest.kt index 42b309c20c..363be7061d 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/MakeDateEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/MakeDateEvaluationTest.kt @@ -81,4 +81,4 @@ class MakeDateEvaluationTest : EvaluatorTestBase() { maxArity = 3, minArity = 3 ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/MakeTimeEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/MakeTimeEvaluationTest.kt index e898dc9ed7..33c2d6d7b9 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/MakeTimeEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/MakeTimeEvaluationTest.kt @@ -85,4 +85,4 @@ class MakeTimeEvaluationTest : EvaluatorTestBase() { minArity = 3, maxArity = 4 ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/SizeEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/SizeEvaluationTest.kt index 3506e8c36d..4b4a0ebe41 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/SizeEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/SizeEvaluationTest.kt @@ -54,4 +54,4 @@ class SizeEvaluationTest : EvaluatorTestBase() { maxArity = 1, minArity = 1 ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/SubstringEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/SubstringEvaluationTest.kt index 5a662bac26..4d0f14a011 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/SubstringEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/SubstringEvaluationTest.kt @@ -37,7 +37,7 @@ class SubstringEvaluationTest : EvaluatorTestBase() { ExprFunctionTestCase( "substring('abcde\u0832fgh' FROM 3 FOR 6)", "\"cde\u0832fg\"" - ), //Note: U+0832 is a "combining diacritical mark" https://en.wikipedia.org/wiki/Combining_character. + ), // Note: U+0832 is a "combining diacritical mark" https://en.wikipedia.org/wiki/Combining_character. // Even though it is visually merged with the preceding letter when displayed, it still counts as a distinct codepoint. 
ExprFunctionTestCase("substring(null FROM 1)", "null"), ExprFunctionTestCase("substring('abc' FROM null)", "null"), diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/ToStringEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/ToStringEvaluationTest.kt index 57e520d2e3..c1c1cf2392 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/ToStringEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/ToStringEvaluationTest.kt @@ -70,11 +70,11 @@ class ToStringEvaluationTest : EvaluatorTestBase() { override fun getParameters(): List = listOf( InvalidArgTestCase("to_string(`2017-01-01`, 'b')", "b"), - //Symbol 'z' is known to Java's DateTimeFormatter but is not handled by TimestampTemporalAccessor + // Symbol 'z' is known to Java's DateTimeFormatter but is not handled by TimestampTemporalAccessor InvalidArgTestCase("to_string(`2017-01-01`, 'Y')", "Y"), - //Symbol 'VV' is known to Java's DateTimeFormatter but is not handled by TimestampTemporalAccessor - //*and* causes a different exception to be thrown by DateTimeFormatter.format() than 'z' + // Symbol 'VV' is known to Java's DateTimeFormatter but is not handled by TimestampTemporalAccessor + // *and* causes a different exception to be thrown by DateTimeFormatter.format() than 'z' InvalidArgTestCase("to_string(`2017-01-01`, 'VV')", "VV") ) } @@ -96,4 +96,4 @@ class ToStringEvaluationTest : EvaluatorTestBase() { maxArity = 2, minArity = 2 ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/ToTimestampEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/ToTimestampEvaluationTest.kt index ffa93cb30e..5926f5e47b 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/ToTimestampEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/ToTimestampEvaluationTest.kt @@ -13,7 +13,6 @@ import org.partiql.lang.eval.builtins.checkInvalidArity import org.partiql.lang.types.StaticType import org.partiql.lang.util.ArgumentsProviderBase import org.partiql.lang.util.to -import kotlin.math.min class ToTimestampEvaluationTest : EvaluatorTestBase() { // Pass test cases @@ -125,4 +124,4 @@ class ToTimestampEvaluationTest : EvaluatorTestBase() { minArity = 1, maxArity = 2 ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/TrimEvaluationTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/TrimEvaluationTest.kt index d39e6407f2..c53ffa71c1 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/TrimEvaluationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/TrimEvaluationTest.kt @@ -99,4 +99,4 @@ class TrimEvaluationTest : EvaluatorTestBase() { ) // The invalid arity check is considered as syntax error and already done in the ParserErrorsTest.kt -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/functions/UnixTimestampFunctionTest.kt b/lang/test/org/partiql/lang/eval/builtins/functions/UnixTimestampFunctionTest.kt index 9a7d3cbdf3..c90b138ba7 100644 --- a/lang/test/org/partiql/lang/eval/builtins/functions/UnixTimestampFunctionTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/functions/UnixTimestampFunctionTest.kt @@ -5,8 +5,8 @@ import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.ArgumentsSource import org.partiql.lang.eval.EvaluatorTestBase import org.partiql.lang.eval.builtins.Argument -import org.partiql.lang.eval.builtins.buildSessionWithNow import 
org.partiql.lang.eval.builtins.ExprFunctionTestCase +import org.partiql.lang.eval.builtins.buildSessionWithNow import org.partiql.lang.eval.builtins.checkInvalidArgType import org.partiql.lang.eval.builtins.checkInvalidArity import org.partiql.lang.types.StaticType diff --git a/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexerTest.kt b/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexerTest.kt index 71cdc1e668..1d75eed6cc 100644 --- a/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexerTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexerTest.kt @@ -9,8 +9,10 @@ class TimestampFormatPatternLexerTest { private fun text(s: String) = Token(TokenType.TEXT, s) private fun pattern(s: String) = Token(TokenType.PATTERN, s) - private fun assertTokens(s: String, vararg tokens: Token) = assertEquals(tokens.toList(), - TimestampFormatPatternLexer().tokenize(s)) + private fun assertTokens(s: String, vararg tokens: Token) = assertEquals( + tokens.toList(), + TimestampFormatPatternLexer().tokenize(s) + ) @Test fun singlePatternToken() = assertTokens("y", pattern("y")) @@ -25,86 +27,98 @@ class TimestampFormatPatternLexerTest { fun longSinglePatternToken() = assertTokens("yyyyyyyyyyyyyyyyyyy", pattern("yyyyyyyyyyyyyyyyyyy")) @Test - fun allPatternCharacters() = assertTokens("yMdahHmsSXx", - pattern("y"), - pattern("M"), - pattern("d"), - pattern("a"), - pattern("h"), - pattern("H"), - pattern("m"), - pattern("s"), - pattern("S"), - pattern("X"), - pattern("x")) + fun allPatternCharacters() = assertTokens( + "yMdahHmsSXx", + pattern("y"), + pattern("M"), + pattern("d"), + pattern("a"), + pattern("h"), + pattern("H"), + pattern("m"), + pattern("s"), + pattern("S"), + pattern("X"), + pattern("x") + ) @Test fun allNonEscapedText() = assertTokens(" /-,:.", text(" /-,:.")) @Test - fun onlyText() = assertTokens("'some quoted text'-----'more quoted'.''''", - text("'some quoted text'"), - text("-----"), - text("'more quoted'"), - text("."), - text("''"), - text("''")) + fun onlyText() = assertTokens( + "'some quoted text'-----'more quoted'.''''", + text("'some quoted text'"), + text("-----"), + text("'more quoted'"), + text("."), + text("''"), + text("''") + ) @Test - fun withWhitespace() = assertTokens("y y y y", - pattern("y"), - text(" "), - pattern("y"), - text(" "), - pattern("y"), - text(" "), - pattern("y")) + fun withWhitespace() = assertTokens( + "y y y y", + pattern("y"), + text(" "), + pattern("y"), + text(" "), + pattern("y"), + text(" "), + pattern("y") + ) @Test - fun withNonEscapedTextAndPattern() = assertTokens("y/m-d,h:y.s", - pattern("y"), - text("/"), - pattern("m"), - text("-"), - pattern("d"), - text(","), - pattern("h"), - text(":"), - pattern("y"), - text("."), - pattern("s")) + fun withNonEscapedTextAndPattern() = assertTokens( + "y/m-d,h:y.s", + pattern("y"), + text("/"), + pattern("m"), + text("-"), + pattern("d"), + text(","), + pattern("h"), + text(":"), + pattern("y"), + text("."), + pattern("s") + ) @Test - fun withNonEscapedTextWhitespaceAndPattern() = assertTokens("yyyy-mm-dd HH:hh", - pattern("yyyy"), - text("-"), - pattern("mm"), - text("-"), - pattern("dd"), - text(" "), - pattern("HH"), - text(":"), - pattern("hh")) + fun withNonEscapedTextWhitespaceAndPattern() = assertTokens( + "yyyy-mm-dd HH:hh", + pattern("yyyy"), + text("-"), + pattern("mm"), + text("-"), + pattern("dd"), + text(" "), + pattern("HH"), + text(":"), + pattern("hh") + ) @Test fun 
withQuotes() = assertTokens("y'TT'y", pattern("y"), text("'TT'"), pattern("y")) @Test - fun ionTimestampDefaultPattern() = assertTokens("yyyy-MM-dd'T'HH:mm:ss.SSSX", - pattern("yyyy"), - text("-"), - pattern("MM"), - text("-"), - pattern("dd"), - text("'T'"), - pattern("HH"), - text(":"), - pattern("mm"), - text(":"), - pattern("ss"), - text("."), - pattern("SSS"), - pattern("X")) + fun ionTimestampDefaultPattern() = assertTokens( + "yyyy-MM-dd'T'HH:mm:ss.SSSX", + pattern("yyyy"), + text("-"), + pattern("MM"), + text("-"), + pattern("dd"), + text("'T'"), + pattern("HH"), + text(":"), + pattern("mm"), + text(":"), + pattern("ss"), + text("."), + pattern("SSS"), + pattern("X") + ) @Test(expected = EvaluationException::class) fun openQuotes() { @@ -115,4 +129,4 @@ class TimestampFormatPatternLexerTest { fun unknownCharacters() { TimestampFormatPatternLexer().tokenize("yyyyP") } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/builtins/timestamp/ToTimestampFormatPatternValidationTest.kt b/lang/test/org/partiql/lang/eval/builtins/timestamp/ToTimestampFormatPatternValidationTest.kt index b8767e40f8..b85f45bb83 100644 --- a/lang/test/org/partiql/lang/eval/builtins/timestamp/ToTimestampFormatPatternValidationTest.kt +++ b/lang/test/org/partiql/lang/eval/builtins/timestamp/ToTimestampFormatPatternValidationTest.kt @@ -10,7 +10,6 @@ import org.partiql.lang.eval.EvaluatorTestBase import org.partiql.lang.util.sourceLocationProperties import org.partiql.lang.util.to - @RunWith(JUnitParamsRunner::class) class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { @@ -22,7 +21,8 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { "TO_TIMESTAMP('doesnt matter', 'yyyy M dd H m a')", ErrorCode.EVALUATOR_TIMESTAMP_FORMAT_PATTERN_HOUR_CLOCK_AM_PM_MISMATCH, sourceLocationProperties(1, 1) + mapOf(Property.TIMESTAMP_FORMAT_PATTERN to "yyyy M dd H m a"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) } @Test @@ -31,7 +31,8 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { "TO_TIMESTAMP('doesnt matter', 'yyyy M dd h m')", ErrorCode.EVALUATOR_TIMESTAMP_FORMAT_PATTERN_HOUR_CLOCK_AM_PM_MISMATCH, sourceLocationProperties(1, 1) + mapOf(Property.TIMESTAMP_FORMAT_PATTERN to "yyyy M dd h m"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) } @Test @@ -40,7 +41,8 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { "TO_TIMESTAMP('doesnt matter', 'y MMMMM')", ErrorCode.EVALUATOR_INVALID_TIMESTAMP_FORMAT_PATTERN_SYMBOL_FOR_PARSING, sourceLocationProperties(1, 1) + mapOf(Property.TIMESTAMP_FORMAT_PATTERN to "y MMMMM"), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) } fun parametersForIncompleteFormatPatternTest() = listOf( @@ -76,21 +78,23 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { ValidationTestCase("yyyy-M-d-h-m-S", "SECOND_OF_MINUTE") ) - @Test @Parameters fun incompleteFormatPatternTest(testCase: ValidationTestCase) { - checkInputThrowingEvaluationException("TO_TIMESTAMP('doesnt matter', '${testCase.pattern.replace("'", "''")}')", + checkInputThrowingEvaluationException( + "TO_TIMESTAMP('doesnt matter', '${testCase.pattern.replace("'", "''")}')", ErrorCode.EVALUATOR_INCOMPLETE_TIMESTAMP_FORMAT_PATTERN, - sourceLocationProperties(1, 1) + mapOf(Property.TIMESTAMP_FORMAT_PATTERN to testCase.pattern, - Property.TIMESTAMP_FORMAT_PATTERN_FIELDS to testCase.fields + 
sourceLocationProperties(1, 1) + mapOf( + Property.TIMESTAMP_FORMAT_PATTERN to testCase.pattern, + Property.TIMESTAMP_FORMAT_PATTERN_FIELDS to testCase.fields ), - expectedPermissiveModeResult = "MISSING") + expectedPermissiveModeResult = "MISSING" + ) } fun parametersForDuplicateFieldPatternTest() = listOf( - //y, yy, and yyyy + // y, yy, and yyyy ValidationTestCase("y y", "YEAR"), ValidationTestCase("y yy", "YEAR"), ValidationTestCase("y yyyy", "YEAR"), @@ -100,8 +104,8 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { ValidationTestCase("yyyy y", "YEAR"), ValidationTestCase("yyyy yy", "YEAR"), ValidationTestCase("yyyy yyyy", "YEAR"), - - //M, MM and MMMM + + // M, MM and MMMM ValidationTestCase("M M", "MONTH_OF_YEAR"), ValidationTestCase("M MM", "MONTH_OF_YEAR"), ValidationTestCase("M MMM", "MONTH_OF_YEAR"), @@ -119,12 +123,12 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { ValidationTestCase("MMMM MMM", "MONTH_OF_YEAR"), ValidationTestCase("MMMM MMMM", "MONTH_OF_YEAR"), - //d and dd + // d and dd ValidationTestCase("d d", "DAY_OF_MONTH"), ValidationTestCase("d dd", "DAY_OF_MONTH"), ValidationTestCase("dd dd", "DAY_OF_MONTH"), - - //h, hh, H and HH + + // h, hh, H and HH ValidationTestCase("h h", "HOUR_OF_DAY"), ValidationTestCase("h hh", "HOUR_OF_DAY"), ValidationTestCase("hh hh", "HOUR_OF_DAY"), @@ -133,25 +137,25 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { ValidationTestCase("HH HH", "HOUR_OF_DAY"), ValidationTestCase("h H", "HOUR_OF_DAY"), ValidationTestCase("hh HH", "HOUR_OF_DAY"), - - //m and mm + + // m and mm ValidationTestCase("m m", "MINUTE_OF_HOUR"), ValidationTestCase("m mm", "MINUTE_OF_HOUR"), ValidationTestCase("mm mm", "MINUTE_OF_HOUR"), - - //s and s + + // s and s ValidationTestCase("s s", "SECOND_OF_MINUTE"), ValidationTestCase("s ss", "SECOND_OF_MINUTE"), ValidationTestCase("ss ss", "SECOND_OF_MINUTE"), - //n, S and S + // n, S and S ValidationTestCase("n S", "FRACTION_OF_SECOND"), ValidationTestCase("n SS", "FRACTION_OF_SECOND"), ValidationTestCase("S SS", "FRACTION_OF_SECOND"), ValidationTestCase("S SSS", "FRACTION_OF_SECOND"), ValidationTestCase("S SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", "FRACTION_OF_SECOND"), - //x, xx, xxx, xxxx, and xxxxx (y is needed to prevent validation error from a different rule) + // x, xx, xxx, xxxx, and xxxxx (y is needed to prevent validation error from a different rule) ValidationTestCase("y x x", "OFFSET"), ValidationTestCase("y x xx", "OFFSET"), ValidationTestCase("y x xxx", "OFFSET"), @@ -178,7 +182,7 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { ValidationTestCase("y xxxxx xxxx", "OFFSET"), ValidationTestCase("y xxxxx xxxxx", "OFFSET"), - //X, XX, XXX, XXXX and XXXXX + // X, XX, XXX, XXXX and XXXXX ValidationTestCase("y X X", "OFFSET"), ValidationTestCase("y X XX", "OFFSET"), ValidationTestCase("y X XXX", "OFFSET"), @@ -205,7 +209,7 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { ValidationTestCase("y XXXXX XXXX", "OFFSET"), ValidationTestCase("y XXXXX XXXXX", "OFFSET"), - //x and X (mixed case) + // x and X (mixed case) ValidationTestCase("y x X", "OFFSET"), ValidationTestCase("y x XX", "OFFSET"), ValidationTestCase("y x XXX", "OFFSET"), @@ -236,12 +240,14 @@ class ToTimestampFormatPatternValidationTest : EvaluatorTestBase() { @Test @Parameters fun duplicateFieldPatternTest(testCase: ValidationTestCase) { - checkInputThrowingEvaluationException("TO_TIMESTAMP('doesnt matter', '${testCase.pattern}')", - 
ErrorCode.EVALUATOR_TIMESTAMP_FORMAT_PATTERN_DUPLICATE_FIELDS, - sourceLocationProperties(1, 1) + mapOf(Property.TIMESTAMP_FORMAT_PATTERN to testCase.pattern, - Property.TIMESTAMP_FORMAT_PATTERN_FIELDS to testCase.fields - ), - expectedPermissiveModeResult = "MISSING") + checkInputThrowingEvaluationException( + "TO_TIMESTAMP('doesnt matter', '${testCase.pattern}')", + ErrorCode.EVALUATOR_TIMESTAMP_FORMAT_PATTERN_DUPLICATE_FIELDS, + sourceLocationProperties(1, 1) + mapOf( + Property.TIMESTAMP_FORMAT_PATTERN to testCase.pattern, + Property.TIMESTAMP_FORMAT_PATTERN_FIELDS to testCase.fields + ), + expectedPermissiveModeResult = "MISSING" + ) } } - diff --git a/lang/test/org/partiql/lang/eval/io/CustomExceptionHandlerTest.kt b/lang/test/org/partiql/lang/eval/io/CustomExceptionHandlerTest.kt index 049b431317..32e353acdc 100644 --- a/lang/test/org/partiql/lang/eval/io/CustomExceptionHandlerTest.kt +++ b/lang/test/org/partiql/lang/eval/io/CustomExceptionHandlerTest.kt @@ -15,14 +15,13 @@ import java.lang.IllegalStateException import kotlin.test.assertTrue import kotlin.test.fail -class AlwaysThrowsFunc: ExprFunction { +class AlwaysThrowsFunc : ExprFunction { override fun callWithRequired(env: Environment, required: List): ExprValue { throw IllegalStateException() } override val signature: FunctionSignature get() = FunctionSignature("alwaysthrows", listOf(), returnType = StaticType.ANY) - } class CustomExceptionHandlerTest { @@ -33,14 +32,18 @@ class CustomExceptionHandlerTest { val ion = IonSystemBuilder.standard().build() val compilerPipeline = CompilerPipeline.build(ion) { addFunction(AlwaysThrowsFunc()) - compileOptions(CompileOptions.build { - thunkOptions(ThunkOptions.build { - handleExceptionForLegacyMode { throwable, sourceLocationMeta -> - customHandlerWasInvoked = true - throw IllegalStateException() - } - }) - }) + compileOptions( + CompileOptions.build { + thunkOptions( + ThunkOptions.build { + handleExceptionForLegacyMode { throwable, sourceLocationMeta -> + customHandlerWasInvoked = true + throw IllegalStateException() + } + } + ) + } + ) } val expression = compilerPipeline.compile("alwaysthrows()") @@ -60,16 +63,20 @@ class CustomExceptionHandlerTest { val ion = IonSystemBuilder.standard().build() val compilerPipeline = CompilerPipeline.builder(ion) - .addFunction(AlwaysThrowsFunc()) - .compileOptions(CompileOptions.builder() - .thunkOptions(ThunkOptions.builder() - .handleExceptionForLegacyMode { throwable, sourceLocationMeta -> - customHandlerWasInvoked = true - throw IllegalStateException() - } - .build()) - .build()) - .build() + .addFunction(AlwaysThrowsFunc()) + .compileOptions( + CompileOptions.builder() + .thunkOptions( + ThunkOptions.builder() + .handleExceptionForLegacyMode { throwable, sourceLocationMeta -> + customHandlerWasInvoked = true + throw IllegalStateException() + } + .build() + ) + .build() + ) + .build() val expression = compilerPipeline.compile("alwaysthrows()") @@ -81,4 +88,4 @@ class CustomExceptionHandlerTest { assertTrue(customHandlerWasInvoked, "Custom handler must be invoked") } } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/io/DelimitedValuesTest.kt b/lang/test/org/partiql/lang/eval/io/DelimitedValuesTest.kt index 74e8be871b..f3ab40116f 100644 --- a/lang/test/org/partiql/lang/eval/io/DelimitedValuesTest.kt +++ b/lang/test/org/partiql/lang/eval/io/DelimitedValuesTest.kt @@ -14,14 +14,14 @@ package org.partiql.lang.eval.io -import org.partiql.lang.eval.io.DelimitedValues.ConversionMode.AUTO -import 
org.partiql.lang.eval.io.DelimitedValues.ConversionMode.NONE import org.apache.commons.csv.CSVFormat import org.junit.Test import org.partiql.lang.TestBase import org.partiql.lang.eval.ExprValue import org.partiql.lang.eval.ExprValueType import org.partiql.lang.eval.cloneAndRemoveAnnotations +import org.partiql.lang.eval.io.DelimitedValues.ConversionMode.AUTO +import org.partiql.lang.eval.io.DelimitedValues.ConversionMode.NONE import org.partiql.lang.eval.orderedNamesValue import org.partiql.lang.util.newFromIonText import java.io.StringReader @@ -35,17 +35,21 @@ class DelimitedValuesTest : TestBase() { assertEquals(expectedValues, value.ionValue.cloneAndRemoveAnnotations()) } - private fun read(text: String, - csvFormat: CSVFormat, - conversionMode: DelimitedValues.ConversionMode): ExprValue = + private fun read( + text: String, + csvFormat: CSVFormat, + conversionMode: DelimitedValues.ConversionMode + ): ExprValue = DelimitedValues.exprValue(valueFactory, StringReader(text), csvFormat, conversionMode) - private fun assertWrite(expectedText: String, - valueText: String, - names: List, - writeHeader: Boolean, - delimiter: Char = ',', - newline: String = "\n") { + private fun assertWrite( + expectedText: String, + valueText: String, + names: List, + writeHeader: Boolean, + delimiter: Char = ',', + newline: String = "\n" + ) { val actualText = StringWriter().use { val rowExprValue = valueFactory.newFromIonText(valueText) @@ -53,7 +57,8 @@ class DelimitedValuesTest : TestBase() { rowExprValue.asSequence().map { // apply the "schema" it.orderedNamesValue(names) - }) + } + ) // val exprValue = SequenceExprValue( // ion, @@ -69,10 +74,12 @@ class DelimitedValuesTest : TestBase() { assertEquals(expectedText, actualText) } - private fun voidWrite(exprValue: ExprValue, - writeHeader: Boolean, - delimiter: Char = ',', - newline: String = "\n") { + private fun voidWrite( + exprValue: ExprValue, + writeHeader: Boolean, + delimiter: Char = ',', + newline: String = "\n" + ) { DelimitedValues.writeTo(ion, StringWriter(), exprValue, delimiter, newline, writeHeader) } @@ -196,7 +203,8 @@ class DelimitedValuesTest : TestBase() { 0 -> exprValue.orderedNamesValue(listOf("a")) else -> exprValue.orderedNamesValue(listOf("b")) } - }), + } + ), writeHeader = false ) diff --git a/lang/test/org/partiql/lang/eval/like/PatternPartTests.kt b/lang/test/org/partiql/lang/eval/like/PatternPartTests.kt index b6319f4620..d5c17205c6 100644 --- a/lang/test/org/partiql/lang/eval/like/PatternPartTests.kt +++ b/lang/test/org/partiql/lang/eval/like/PatternPartTests.kt @@ -16,96 +16,135 @@ class PatternPartTests { vectors.map { TestCase(pattern, escapeChar?.toInt(), it.first, it.second) } fun parametersForPatternTest() = listOf( - createTestCase("a", null, listOf( - "" to false, - "a" to true, - "aa" to false, - "b" to false, - "bb" to false - )), - createTestCase("aa", null, listOf( - "" to false, - "a" to false, - "aa" to true, - "b" to false, - "bb" to false - )), - createTestCase("_", null, listOf( - "" to false, - "a" to true, - "b" to true, - "aa" to false, - "bb" to false - )), - createTestCase("__", null, listOf( - "a" to false, - "b" to false, - "aa" to true, - "bb" to true - )), - createTestCase("%", null, listOf( - "" to true, - "a" to true, - "bb" to true - )), - createTestCase("%%", null, listOf( - "" to true, - "a" to true, - "bb" to true - )), - createTestCase("a%", null, listOf( - "" to false, - "a" to true, - "ab" to true, - "abcde" to true, - "b" to false, - "ba" to false, - "baa" to false - )), - 
createTestCase("%a", null, listOf( - "" to false, - "a" to true, - "ba" to true, - "edcba" to true, - "b" to false, - "ab" to false, - "aab" to false - )), - createTestCase("%foo%bar%bat%baz%bork%borz%", null, listOf( - "" to false, - "foobarbatbazborkborz" to true, - "000foo1bar22bat333baz444bork555borz666" to true, - "000foo1bar22bat333baz444bork555borD666" to false - )), - createTestCase("%a%", null, listOf( - "" to false, - "a" to true, - "ab" to true, - "ba" to true, - "bab" to true, - "bbabb" to true, - "b" to false, - "bb" to false - )), - createTestCase("%_asdf_%", null, listOf( - "" to false, - "asdf" to false, - "1asdf1" to true, - "1asdf1x" to true, - "x1asdf1" to true, - "xyz1asdf1" to true, - "1asdf1xyz" to true, - "xyz1asdf1xyz" to true - )), - createTestCase("\\%\\_", '\\', listOf( - "" to false, - "%_" to true - )), - createTestCase("%\\%\\__", '\\', listOf( - "" to false, - "%_1" to true, - "asdf%_1" to true - )) + createTestCase( + "a", null, + listOf( + "" to false, + "a" to true, + "aa" to false, + "b" to false, + "bb" to false + ) + ), + createTestCase( + "aa", null, + listOf( + "" to false, + "a" to false, + "aa" to true, + "b" to false, + "bb" to false + ) + ), + createTestCase( + "_", null, + listOf( + "" to false, + "a" to true, + "b" to true, + "aa" to false, + "bb" to false + ) + ), + createTestCase( + "__", null, + listOf( + "a" to false, + "b" to false, + "aa" to true, + "bb" to true + ) + ), + createTestCase( + "%", null, + listOf( + "" to true, + "a" to true, + "bb" to true + ) + ), + createTestCase( + "%%", null, + listOf( + "" to true, + "a" to true, + "bb" to true + ) + ), + createTestCase( + "a%", null, + listOf( + "" to false, + "a" to true, + "ab" to true, + "abcde" to true, + "b" to false, + "ba" to false, + "baa" to false + ) + ), + createTestCase( + "%a", null, + listOf( + "" to false, + "a" to true, + "ba" to true, + "edcba" to true, + "b" to false, + "ab" to false, + "aab" to false + ) + ), + createTestCase( + "%foo%bar%bat%baz%bork%borz%", null, + listOf( + "" to false, + "foobarbatbazborkborz" to true, + "000foo1bar22bat333baz444bork555borz666" to true, + "000foo1bar22bat333baz444bork555borD666" to false + ) + ), + createTestCase( + "%a%", null, + listOf( + "" to false, + "a" to true, + "ab" to true, + "ba" to true, + "bab" to true, + "bbabb" to true, + "b" to false, + "bb" to false + ) + ), + createTestCase( + "%_asdf_%", null, + listOf( + "" to false, + "asdf" to false, + "1asdf1" to true, + "1asdf1x" to true, + "x1asdf1" to true, + "xyz1asdf1" to true, + "1asdf1xyz" to true, + "xyz1asdf1xyz" to true + ) + ), + createTestCase( + "\\%\\_", '\\', + listOf( + "" to false, + "%_" to true + ) + ), + createTestCase( + "%\\%\\__", '\\', + listOf( + "" to false, + "%_1" to true, + "asdf%_1" to true + ) + ) ).flatten() @Test @@ -131,7 +170,8 @@ class PatternPartTests { PatternPart.ExactChars(" ".codePoints().toArray()), PatternPart.ZeroOrMoreOfAnyChar ), - patParts) + patParts + ) } @Test @@ -139,4 +179,4 @@ class PatternPartTests { // makes absolutely certain we do not stack overflow on too many consecutive `%` characters assertEquals(true, executePattern(parsePattern("%".repeat(10000) + "a", escapeChar = null), "aaaa")) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/time/TimeTest.kt b/lang/test/org/partiql/lang/eval/time/TimeTest.kt index 9a205cb901..a0362f31f5 100644 --- a/lang/test/org/partiql/lang/eval/time/TimeTest.kt +++ b/lang/test/org/partiql/lang/eval/time/TimeTest.kt @@ -98,7 +98,7 @@ class TimeTest { 
precision = 0, tz_min = -18 * MINUTES_PER_HOUR, expectedLocalTime = LocalTime.of(23, 23, 12, 0), - expectedZoneOffset = ZoneOffset.ofTotalSeconds( -18 * SECONDS_PER_HOUR) + expectedZoneOffset = ZoneOffset.ofTotalSeconds(-18 * SECONDS_PER_HOUR) ), case( hour = 23, @@ -108,7 +108,7 @@ class TimeTest { precision = 0, tz_min = 18 * MINUTES_PER_HOUR, expectedLocalTime = LocalTime.of(23, 23, 12, 0), - expectedZoneOffset = ZoneOffset.ofTotalSeconds( 18 * SECONDS_PER_HOUR) + expectedZoneOffset = ZoneOffset.ofTotalSeconds(18 * SECONDS_PER_HOUR) ), case( hour = 23, @@ -179,4 +179,4 @@ class TimeTest { expectedErrorCode = ErrorCode.EVALUATOR_TIME_FIELD_OUT_OF_RANGE ) ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransformTests.kt b/lang/test/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransformTests.kt index c60b296545..0c95529723 100644 --- a/lang/test/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransformTests.kt +++ b/lang/test/org/partiql/lang/eval/visitors/AggregateSupportVisitorTransformTests.kt @@ -35,7 +35,7 @@ class AggregateSupportVisitorTransformTests : VisitorTransformTestBase() { * Simple helper for testing that parses [this] SFW query, transforms it using [AggregateSupportVisitorTransform], * and returns the transformed query as [PartiqlAst.Expr.Select]. */ - private fun String.parseAndTransformQuery() : PartiqlAst.Expr.Select { + private fun String.parseAndTransformQuery(): PartiqlAst.Expr.Select { val query = this val statement = super.parser.parseAstStatement(query) val transformedNode = (transformer).transformStatement(statement) as PartiqlAst.Statement.Query @@ -50,21 +50,25 @@ class AggregateSupportVisitorTransformTests : VisitorTransformTestBase() { * and [AggregateRegisterIdMeta] passed as the second argument of the pair. */ private fun createCallAggMetas(callAggs: List>): MetaContainer = - metaContainerOf(AggregateCallSiteListMeta.TAG to AggregateCallSiteListMeta( - callAggs.map { callAgg -> - PartiqlAst.build { - callAgg( - setq = all(), - funcName = callAgg.first, - arg = lit(ionInt(1)), - metas = metaContainerOf(AggregateRegisterIdMeta.TAG to AggregateRegisterIdMeta(callAgg.second))) + metaContainerOf( + AggregateCallSiteListMeta.TAG to AggregateCallSiteListMeta( + callAggs.map { callAgg -> + PartiqlAst.build { + callAgg( + setq = all(), + funcName = callAgg.first, + arg = lit(ionInt(1)), + metas = metaContainerOf(AggregateRegisterIdMeta.TAG to AggregateRegisterIdMeta(callAgg.second)) + ) + } } - })) + ) + ) /** * Simple helper for testing to remove the [SourceLocationMeta] from [this] [MetaContainer]. */ - private fun MetaContainer.removeSourceLocation() : MetaContainer = this.minus(SourceLocationMeta.TAG) + private fun MetaContainer.removeSourceLocation(): MetaContainer = this.minus(SourceLocationMeta.TAG) /** * Checks that [expected] and [actual] have the same metas (i.e. 
[AggregateCallSiteListMeta]) @@ -86,27 +90,33 @@ class AggregateSupportVisitorTransformTests : VisitorTransformTestBase() { // one aggregate transform AggSupportTestCase( "SELECT COUNT(1) FROM foo", - listOf(Pair("count", 0))), + listOf(Pair("count", 0)) + ), // multiple aggregates transform AggSupportTestCase( "SELECT COUNT(1), SUM(1), AVG(1) FROM foo", - listOf(Pair("count", 0), Pair("sum", 1), Pair("avg", 2))), + listOf(Pair("count", 0), Pair("sum", 1), Pair("avg", 2)) + ), // one aggregate in HAVING transform AggSupportTestCase( "SELECT 1 FROM foo GROUP BY bar HAVING SUM(1) > 0", - listOf(Pair("sum", 0))), + listOf(Pair("sum", 0)) + ), // one aggregate and one aggregate in HAVING transform AggSupportTestCase( "SELECT COUNT(1) FROM foo GROUP BY bar HAVING SUM(1) > 0", - listOf(Pair("sum", 0), Pair("count", 1))), + listOf(Pair("sum", 0), Pair("count", 1)) + ), // SELECT VALUE aggregate transform AggSupportTestCase( "SELECT VALUE COUNT(1) FROM foo", - emptyList()), + emptyList() + ), // SELECT VALUE one aggregate and HAVING transform AggSupportTestCase( "SELECT VALUE COUNT(1) FROM foo GROUP BY bar HAVING SUM(1) > 0", - listOf(Pair("sum", 0))) + listOf(Pair("sum", 0)) + ) ) } diff --git a/lang/test/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransformTests.kt b/lang/test/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransformTests.kt index e81167aa20..4da4260255 100644 --- a/lang/test/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransformTests.kt +++ b/lang/test/org/partiql/lang/eval/visitors/FromSourceAliasVisitorTransformTests.kt @@ -22,130 +22,159 @@ class FromSourceAliasVisitorTransformTests : VisitorTransformTestBase() { class ArgsProvider : ArgumentsProviderBase() { override fun getParameters(): List = listOf( - //Aliases extracted from variable reference names - TransformTestCase( - "SELECT * FROM a", - "SELECT * FROM a AS a"), - TransformTestCase( - "SELECT * FROM a AT z", - "SELECT * FROM a AS a AT z"), - - TransformTestCase( - "SELECT * FROM a, b", - "SELECT * FROM a AS a, b AS b"), - TransformTestCase( - "SELECT * FROM a, a", - "SELECT * FROM a AS a, a AS a"), - - TransformTestCase( - "SELECT * FROM a AT z, b AT y", - "SELECT * FROM a AS a AT z, b AS b AT y"), - - TransformTestCase( - "SELECT * FROM a, b, c", - "SELECT * FROM a AS a, b AS b, c AS c"), - TransformTestCase( - "SELECT * FROM a AT z, b AT y, c AT x", - "SELECT * FROM a AS a AT z, b AS b AT y, c AS c AT x"), - - //Path variants of the above - TransformTestCase( - "SELECT * FROM foo.a", - "SELECT * FROM foo.a AS a"), - - TransformTestCase( - "SELECT * FROM foo.a, bar.b", - "SELECT * FROM foo.a AS a, bar.b AS b"), - - TransformTestCase( - "SELECT * FROM foo.a, bar.a", - "SELECT * FROM foo.a AS a, bar.a AS a"), - - TransformTestCase( - "SELECT * FROM foo.a, foo.bar.a", - "SELECT * FROM foo.a AS a, foo.bar.a AS a"), - - TransformTestCase( - "SELECT * FROM foo.a, bar.b, bat.c", - "SELECT * FROM foo.a AS a, bar.b AS b, bat.c AS c"), - - TransformTestCase( - "SELECT * FROM foo.doo.a", - "SELECT * FROM foo.doo.a AS a"), - - TransformTestCase( - "SELECT * FROM foo.doo.a, bar.doo.b", - "SELECT * FROM foo.doo.a AS a, bar.doo.b AS b"), - - TransformTestCase( - "SELECT * FROM foo.doo.a, bar.doo.b, bat.doo.c", - "SELECT * FROM foo.doo.a AS a, bar.doo.b AS b, bat.doo.c AS c"), - - //Aliases synthesized by position in reference - TransformTestCase( - "SELECT * FROM <>", - "SELECT * FROM <> AS _1"), - - TransformTestCase( - "SELECT * FROM <>, <>", - "SELECT * FROM <> AS _1, <> AS _2"), - - TransformTestCase( - 
"SELECT * FROM <>, <>, <>", - "SELECT * FROM <> AS _1, <> as _2, <> AS _3"), - - TransformTestCase( - "SELECT * FROM a, <>, <>", - "SELECT * FROM a AS a, <> as _2, <> AS _3"), - - TransformTestCase( - "SELECT * FROM <>, b, <>", - "SELECT * FROM <> AS _1, b AS b, <> AS _3"), - - TransformTestCase( - "SELECT * FROM <>, <>, c", - "SELECT * FROM <> AS _1, <> AS _2, c AS c"), - - - //Subqueries should be independent - TransformTestCase( - "SELECT * FROM (SELECT * FROM <>, <>), <>, <>", - "SELECT * FROM (SELECT * FROM <> AS _1, <> AS _2) AS _1, <> AS _2, <> AS _3"), - TransformTestCase( - "SELECT * FROM a, (SELECT a.x, b.y FROM b)", - "SELECT * FROM a AS a, (SELECT a.x, b.y FROM b AS b) AS _2"), - - //The transform should apply to subqueries even if the from source they are contained within has already been - //aliased. - TransformTestCase( - "SELECT * FROM (SELECT * FROM <>, <>) AS z, <>, <>", - "SELECT * FROM (SELECT * FROM <> AS _1, <> AS _2) AS z, <> AS _2, <> AS _3"), - - //UNPIVOT variants of the above - TransformTestCase( - "SELECT * FROM UNPIVOT a", - "SELECT * FROM UNPIVOT a AS a"), - TransformTestCase( - "SELECT * FROM UNPIVOT a AT z", - "SELECT * FROM UNPIVOT a AS a AT z"), - TransformTestCase( - "SELECT * FROM UNPIVOT <> AT z", - "SELECT * FROM UNPIVOT <> AS _1 AT z"), - TransformTestCase( - "SELECT * FROM UNPIVOT (SELECT * FROM <>, <>), <>, <>", - "SELECT * FROM UNPIVOT (SELECT * FROM <> AS _1, <> AS _2) AS _1, <> AS _2, <> AS _3"), - TransformTestCase( - "SELECT * FROM UNPIVOT (SELECT * FROM <>, <>) AS z, <>, <>", - "SELECT * FROM UNPIVOT (SELECT * FROM <> AS _1, <> AS _2) AS z, <> AS _2, <> AS _3"), - - // DML - TransformTestCase( - "FROM dogs INSERT INTO collies VALUE ?", - "FROM dogs AS dogs INSERT INTO collies VALUE ?"), - - // CAST ALIAS TEST - TransformTestCase( - """ + // Aliases extracted from variable reference names + TransformTestCase( + "SELECT * FROM a", + "SELECT * FROM a AS a" + ), + TransformTestCase( + "SELECT * FROM a AT z", + "SELECT * FROM a AS a AT z" + ), + + TransformTestCase( + "SELECT * FROM a, b", + "SELECT * FROM a AS a, b AS b" + ), + TransformTestCase( + "SELECT * FROM a, a", + "SELECT * FROM a AS a, a AS a" + ), + + TransformTestCase( + "SELECT * FROM a AT z, b AT y", + "SELECT * FROM a AS a AT z, b AS b AT y" + ), + + TransformTestCase( + "SELECT * FROM a, b, c", + "SELECT * FROM a AS a, b AS b, c AS c" + ), + TransformTestCase( + "SELECT * FROM a AT z, b AT y, c AT x", + "SELECT * FROM a AS a AT z, b AS b AT y, c AS c AT x" + ), + + // Path variants of the above + TransformTestCase( + "SELECT * FROM foo.a", + "SELECT * FROM foo.a AS a" + ), + + TransformTestCase( + "SELECT * FROM foo.a, bar.b", + "SELECT * FROM foo.a AS a, bar.b AS b" + ), + + TransformTestCase( + "SELECT * FROM foo.a, bar.a", + "SELECT * FROM foo.a AS a, bar.a AS a" + ), + + TransformTestCase( + "SELECT * FROM foo.a, foo.bar.a", + "SELECT * FROM foo.a AS a, foo.bar.a AS a" + ), + + TransformTestCase( + "SELECT * FROM foo.a, bar.b, bat.c", + "SELECT * FROM foo.a AS a, bar.b AS b, bat.c AS c" + ), + + TransformTestCase( + "SELECT * FROM foo.doo.a", + "SELECT * FROM foo.doo.a AS a" + ), + + TransformTestCase( + "SELECT * FROM foo.doo.a, bar.doo.b", + "SELECT * FROM foo.doo.a AS a, bar.doo.b AS b" + ), + + TransformTestCase( + "SELECT * FROM foo.doo.a, bar.doo.b, bat.doo.c", + "SELECT * FROM foo.doo.a AS a, bar.doo.b AS b, bat.doo.c AS c" + ), + + // Aliases synthesized by position in reference + TransformTestCase( + "SELECT * FROM <>", + "SELECT * FROM <> AS _1" + ), + + 
TransformTestCase( + "SELECT * FROM <>, <>", + "SELECT * FROM <> AS _1, <> AS _2" + ), + + TransformTestCase( + "SELECT * FROM <>, <>, <>", + "SELECT * FROM <> AS _1, <> as _2, <> AS _3" + ), + + TransformTestCase( + "SELECT * FROM a, <>, <>", + "SELECT * FROM a AS a, <> as _2, <> AS _3" + ), + + TransformTestCase( + "SELECT * FROM <>, b, <>", + "SELECT * FROM <> AS _1, b AS b, <> AS _3" + ), + + TransformTestCase( + "SELECT * FROM <>, <>, c", + "SELECT * FROM <> AS _1, <> AS _2, c AS c" + ), + + // Subqueries should be independent + TransformTestCase( + "SELECT * FROM (SELECT * FROM <>, <>), <>, <>", + "SELECT * FROM (SELECT * FROM <> AS _1, <> AS _2) AS _1, <> AS _2, <> AS _3" + ), + TransformTestCase( + "SELECT * FROM a, (SELECT a.x, b.y FROM b)", + "SELECT * FROM a AS a, (SELECT a.x, b.y FROM b AS b) AS _2" + ), + + // The transform should apply to subqueries even if the from source they are contained within has already been + // aliased. + TransformTestCase( + "SELECT * FROM (SELECT * FROM <>, <>) AS z, <>, <>", + "SELECT * FROM (SELECT * FROM <> AS _1, <> AS _2) AS z, <> AS _2, <> AS _3" + ), + + // UNPIVOT variants of the above + TransformTestCase( + "SELECT * FROM UNPIVOT a", + "SELECT * FROM UNPIVOT a AS a" + ), + TransformTestCase( + "SELECT * FROM UNPIVOT a AT z", + "SELECT * FROM UNPIVOT a AS a AT z" + ), + TransformTestCase( + "SELECT * FROM UNPIVOT <> AT z", + "SELECT * FROM UNPIVOT <> AS _1 AT z" + ), + TransformTestCase( + "SELECT * FROM UNPIVOT (SELECT * FROM <>, <>), <>, <>", + "SELECT * FROM UNPIVOT (SELECT * FROM <> AS _1, <> AS _2) AS _1, <> AS _2, <> AS _3" + ), + TransformTestCase( + "SELECT * FROM UNPIVOT (SELECT * FROM <>, <>) AS z, <>, <>", + "SELECT * FROM UNPIVOT (SELECT * FROM <> AS _1, <> AS _2) AS z, <> AS _2, <> AS _3" + ), + + // DML + TransformTestCase( + "FROM dogs INSERT INTO collies VALUE ?", + "FROM dogs AS dogs INSERT INTO collies VALUE ?" 
+ ), + + // CAST ALIAS TEST + TransformTestCase( + """ SELECT 1 FROM CAST(1 AS STRING), CAST(foo AS INT), @@ -154,7 +183,7 @@ class FromSourceAliasVisitorTransformTests : VisitorTransformTestBase() { CAST(CAST(foo.baz AS INT) AS FLOAT), CAST(x + y AS INT) """, - """ + """ SELECT 1 FROM CAST(1 AS STRING) AS _1, CAST(foo AS INT) AS foo, @@ -162,12 +191,12 @@ class FromSourceAliasVisitorTransformTests : VisitorTransformTestBase() { CAST(CAST(bat AS INT) AS STRING) AS bat, CAST(CAST(foo.baz AS INT) AS FLOAT) AS baz, CAST(x + y AS INT) AS _6 - """) + """ + ) ) } @ParameterizedTest @ArgumentsSource(ArgsProvider::class) fun test(tc: TransformTestCase) = runTestForIdempotentTransform(tc, FromSourceAliasVisitorTransform()) - } diff --git a/lang/test/org/partiql/lang/eval/visitors/PartiqlAstSanityValidatorTests.kt b/lang/test/org/partiql/lang/eval/visitors/PartiqlAstSanityValidatorTests.kt index 9ca9abf65d..9b1c595f00 100644 --- a/lang/test/org/partiql/lang/eval/visitors/PartiqlAstSanityValidatorTests.kt +++ b/lang/test/org/partiql/lang/eval/visitors/PartiqlAstSanityValidatorTests.kt @@ -37,8 +37,11 @@ class PartiqlAstSanityValidatorTests : TestBase() { project = projectValue(litInt(1)), from = scan(litInt(1)), group = groupBy( - strategy = groupPartial(), - keyList = groupKeyList(emptyList())))) + strategy = groupPartial(), + keyList = groupKeyList(emptyList()) + ) + ) + ) } ) } @@ -57,8 +60,13 @@ class PartiqlAstSanityValidatorTests : TestBase() { project = projectValue(litInt(1)), from = scan(litInt(1)), group = groupBy( - strategy = groupPartial(), - keyList = groupKeyList(emptyList())))))) + strategy = groupPartial(), + keyList = groupKeyList(emptyList()) + ) + ) + ) + ) + ) } ) } @@ -80,8 +88,15 @@ class PartiqlAstSanityValidatorTests : TestBase() { project = projectValue(litInt(1)), from = scan(litInt(1)), group = groupBy( - strategy = groupPartial(), - keyList = groupKeyList(emptyList())))))))) + strategy = groupPartial(), + keyList = groupKeyList(emptyList()) + ) + ) + ) + ) + ) + ) + ) } ) } @@ -98,7 +113,10 @@ class PartiqlAstSanityValidatorTests : TestBase() { project = projectPivot(litInt(1), litInt(1)), group = groupBy( strategy = groupFull(), - keyList = groupKeyList(emptyList())))) + keyList = groupKeyList(emptyList()) + ) + ) + ) } ) } @@ -111,13 +129,19 @@ class PartiqlAstSanityValidatorTests : TestBase() { PartiqlAst.build { query( select( - project = projectValue(select( + project = projectValue( + select( from = scan(litInt(1)), project = projectPivot(litInt(1), litInt(1)), group = groupBy( strategy = groupFull(), - keyList = groupKeyList(emptyList())))), - from = scan(litInt(1)))) + keyList = groupKeyList(emptyList()) + ) + ) + ), + from = scan(litInt(1)) + ) + ) } ) } @@ -134,7 +158,9 @@ class PartiqlAstSanityValidatorTests : TestBase() { project = projectValue(litInt(1)), // The error should occur when `groupBy` is null but `having` is not group = null, - having = litInt(1))) + having = litInt(1) + ) + ) } ) } @@ -146,14 +172,19 @@ class PartiqlAstSanityValidatorTests : TestBase() { partiqlAstSanityValidator.validate( PartiqlAst.build { query( - select ( - from = scan(select( - from = scan(litInt(1)), - project = projectValue(litInt(1)), - // The error should occur when `groupBy` is null but `having` is not - group = null, - having = litInt(1))), - project = projectValue(litInt(1)))) + select( + from = scan( + select( + from = scan(litInt(1)), + project = projectValue(litInt(1)), + // The error should occur when `groupBy` is null but `having` is not + group = null, + having = 
litInt(1) + ) + ), + project = projectValue(litInt(1)) + ) + ) } ) } @@ -172,8 +203,11 @@ class PartiqlAstSanityValidatorTests : TestBase() { // The error should occur when `groupBy.groupByItems` is empty and `having` is not null group = groupBy( strategy = groupFull(), - keyList = groupKeyList(emptyList())), - having = litInt(1))) + keyList = groupKeyList(emptyList()) + ), + having = litInt(1) + ) + ) } ) } @@ -186,15 +220,21 @@ class PartiqlAstSanityValidatorTests : TestBase() { PartiqlAst.build { query( select( - from = scan(select( - from = scan(litInt(1)), - project = projectValue(litInt(1)), - // The error should occur when `groupBy.groupByItems` is empty and `having` is not null - group = groupBy( - strategy = groupFull(), - keyList = groupKeyList(emptyList())), - having = litInt(1))), - project = projectValue(litInt(1)))) + from = scan( + select( + from = scan(litInt(1)), + project = projectValue(litInt(1)), + // The error should occur when `groupBy.groupByItems` is empty and `having` is not null + group = groupBy( + strategy = groupFull(), + keyList = groupKeyList(emptyList()) + ), + having = litInt(1) + ) + ), + project = projectValue(litInt(1)) + ) + ) } ) } @@ -216,7 +256,9 @@ class PartiqlAstSanityValidatorTests : TestBase() { query( select( from = scan(lit(ion.singleValue("${Long.MAX_VALUE}0").toIonElement())), - project = projectValue(litInt(1)))) + project = projectValue(litInt(1)) + ) + ) } ) } @@ -230,9 +272,14 @@ class PartiqlAstSanityValidatorTests : TestBase() { query( select( from = scan(litInt(1)), - project = projectValue(select( - from = scan(lit(ion.singleValue("${Long.MAX_VALUE}0").toIonElement())), - project = projectValue(litInt(1)))))) + project = projectValue( + select( + from = scan(lit(ion.singleValue("${Long.MAX_VALUE}0").toIonElement())), + project = projectValue(litInt(1)) + ) + ) + ) + ) } ) } diff --git a/lang/test/org/partiql/lang/eval/visitors/SelectStarVisitorTransformTests.kt b/lang/test/org/partiql/lang/eval/visitors/SelectStarVisitorTransformTests.kt index bc8a912016..0af3ceb022 100644 --- a/lang/test/org/partiql/lang/eval/visitors/SelectStarVisitorTransformTests.kt +++ b/lang/test/org/partiql/lang/eval/visitors/SelectStarVisitorTransformTests.kt @@ -11,11 +11,11 @@ class SelectStarVisitorTransformTests : VisitorTransformTestBase() { class ArgsProvider : ArgumentsProviderBase() { override fun getParameters(): List = listOf( TransformTestCase( - """ + """ SELECT * FROM foo AS f """, - """ + """ SELECT "f".* FROM foo AS f """ diff --git a/lang/test/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransformTest.kt b/lang/test/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransformTest.kt index 1c7d7672a6..a9b27dbaa4 100644 --- a/lang/test/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransformTest.kt +++ b/lang/test/org/partiql/lang/eval/visitors/StaticTypeInferenceVisitorTransformTest.kt @@ -11,7 +11,6 @@ import org.partiql.lang.ast.passes.inference.isLob import org.partiql.lang.ast.passes.inference.isNumeric import org.partiql.lang.ast.passes.inference.isText import org.partiql.lang.ast.passes.inference.isUnknown -import org.partiql.lang.ast.toAstStatement import org.partiql.lang.domains.PartiqlAst import org.partiql.lang.domains.staticType import org.partiql.lang.errors.Problem @@ -32,14 +31,8 @@ import org.partiql.lang.types.SexpType import org.partiql.lang.types.StaticType import org.partiql.lang.types.StaticType.Companion.ALL_TYPES import org.partiql.lang.types.StaticType.Companion.ANY -import 
org.partiql.lang.types.StaticType.Companion.BOOL -import org.partiql.lang.types.StringType -import org.partiql.lang.types.StructType -import org.partiql.lang.types.TypedOpParameter -import org.partiql.lang.types.VarargFormalParameter -import org.partiql.lang.types.StaticType.Companion.NULL -import org.partiql.lang.types.StaticType.Companion.MISSING import org.partiql.lang.types.StaticType.Companion.BAG +import org.partiql.lang.types.StaticType.Companion.BOOL import org.partiql.lang.types.StaticType.Companion.CLOB import org.partiql.lang.types.StaticType.Companion.DECIMAL import org.partiql.lang.types.StaticType.Companion.FLOAT @@ -48,6 +41,8 @@ import org.partiql.lang.types.StaticType.Companion.INT2 import org.partiql.lang.types.StaticType.Companion.INT4 import org.partiql.lang.types.StaticType.Companion.INT8 import org.partiql.lang.types.StaticType.Companion.LIST +import org.partiql.lang.types.StaticType.Companion.MISSING +import org.partiql.lang.types.StaticType.Companion.NULL import org.partiql.lang.types.StaticType.Companion.NULL_OR_MISSING import org.partiql.lang.types.StaticType.Companion.SEXP import org.partiql.lang.types.StaticType.Companion.STRING @@ -55,6 +50,10 @@ import org.partiql.lang.types.StaticType.Companion.STRUCT import org.partiql.lang.types.StaticType.Companion.SYMBOL import org.partiql.lang.types.StaticType.Companion.TIMESTAMP import org.partiql.lang.types.StaticType.Companion.unionOf +import org.partiql.lang.types.StringType +import org.partiql.lang.types.StructType +import org.partiql.lang.types.TypedOpParameter +import org.partiql.lang.types.VarargFormalParameter import org.partiql.lang.util.cartesianProduct import org.partiql.lang.util.compareTo import org.partiql.lang.util.countMatchingSubstrings @@ -167,7 +166,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { val inferencer = StaticTypeInferencer( globalBindings = globalBindings, customFunctionSignatures = tc.customFunctionSignatures, - customTypedOpParameters = customTypedOpParameters) + customTypedOpParameters = customTypedOpParameters + ) val defaultVisitorTransforms = basicVisitorTransforms() val staticTypeVisitorTransform = StaticTypeVisitorTransform(ion, globalBindings) @@ -222,7 +222,7 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { } private fun crossExpand(template: String, operators: List): List = - when(template.countMatchingSubstrings(TOKEN)) { + when (template.countMatchingSubstrings(TOKEN)) { 0 -> listOf(template) else -> { operators.flatMap { @@ -248,17 +248,20 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { private fun createReturnsNullOrMissingError(line: Long = 1, col: Long, nAryOp: String): Problem = Problem( SourceLocationMeta(line, col, nAryOp.length.toLong()), - SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing) + SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing + ) private fun createReturnsNullOrMissingError(sourceLocation: SourceLocationMeta): Problem = Problem( sourceLocation, - SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing) + SemanticProblemDetails.ExpressionAlwaysReturnsNullOrMissing + ) private fun createDataTypeMismatchError(line: Long = 1, col: Long, argTypes: List, nAryOp: String): Problem = Problem( SourceLocationMeta(line, col, nAryOp.length.toLong()), - SemanticProblemDetails.IncompatibleDatatypesForOp(actualArgumentTypes = argTypes, nAryOp = nAryOp)) + SemanticProblemDetails.IncompatibleDatatypesForOp(actualArgumentTypes = argTypes, nAryOp = nAryOp) 
+ ) private fun createDataTypeMismatchError(sourceLocation: SourceLocationMeta, argTypes: List, nAryOp: String): Problem = Problem( @@ -340,8 +343,7 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ), handler = handler ) - } - else { + } else { it } } @@ -432,7 +434,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { return if (leftType == rightType) { listOf(originalTestCase) } else { - listOf(originalTestCase, + listOf( + originalTestCase, TestCase( name = "x $op y : $name", originalSql = "x $op y", @@ -475,455 +478,455 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { expectedType = numericType ) } + - // Same numeric operand types, double binary operators - ALL_NUMERIC_TYPES.flatMap { numericType -> - doubleArithmeticOpCases( - name = "$numericType", - leftType = numericType, - middleType = numericType, - rightType = numericType, - expectedType = numericType - ) - } + - listOf( - // mixed operand types, single binary operators - singleArithmeticOpCases( - name = "int2 and int4 operands", - leftType = INT2, - rightType = INT4, - expectedType = INT4 - ), - singleArithmeticOpCases( - name = "int2 and int8 operands", - leftType = INT2, - rightType = INT8, - expectedType = INT8 - ), - singleArithmeticOpCases( - name = "int2 and int operands", - leftType = INT2, - rightType = INT, - expectedType = INT - ), - singleArithmeticOpCases( - name = "int4 and int8 operands", - leftType = INT4, - rightType = INT8, - expectedType = INT8 - ), - singleArithmeticOpCases( - name = "int4 and int operands", - leftType = INT4, - rightType = INT, - expectedType = INT - ), - singleArithmeticOpCases( - name = "int4 and any_of(int2, int4) operands", - leftType = INT4, - rightType = unionOf(INT2, INT4), - expectedType = INT4 - ), - singleArithmeticOpCases( - name = "int8 and any_of(int2, int4)", - leftType = INT8, - rightType = unionOf(INT2, INT4), - expectedType = INT8 - ), - singleArithmeticOpCases( - name = "int8 and any_of(int2, int4, float)", - leftType = INT8, - rightType = unionOf(INT2, INT4, FLOAT), - expectedType = unionOf(INT8, FLOAT) - ), - singleArithmeticOpCases( - name = "int8 and any_of(int8, int2, int4, float, decimal)", - leftType = INT8, - rightType = unionOf(INT2, INT4, FLOAT, DECIMAL), - expectedType = unionOf(INT8, FLOAT, DECIMAL) - ), - singleArithmeticOpCases( - name = "any_of(int8, decimal) and any_of(int2, int4, float)", - leftType = unionOf(INT8, DECIMAL), - rightType = unionOf(INT2, INT4, FLOAT), - expectedType = unionOf(INT8, FLOAT, DECIMAL) - ), - doubleArithmeticOpCases( - name = "int2, int4 and int8", - leftType = INT2, - middleType = INT4, - rightType = INT8, - expectedType = INT8 - ), - - // mixed operand types, double binary operators - doubleArithmeticOpCases( - name = "int8, int4 and int2", - leftType = INT8, - middleType = INT4, - rightType = INT2, - expectedType = INT8 - ), - doubleArithmeticOpCases( - name = "any_of(int8, decimal) and int4", - leftType = unionOf(INT8, DECIMAL), - middleType = INT4, - rightType = INT2, - expectedType = unionOf(INT8, DECIMAL) - ), - doubleArithmeticOpCases( - name = "any_of(int8, decimal), any_of(int4, float, missing) and any_of(int2, decimal)", - leftType = unionOf(INT8, DECIMAL), - middleType = unionOf(INT4, FLOAT, MISSING), - rightType = unionOf(INT2, DECIMAL), - expectedType = unionOf( - MISSING, - INT8, - FLOAT, - DECIMAL - ) - ), - - // NULL propagation, single binary operators - singleArithmeticOpCases( - name = "one nullable operand", - leftType 
= INT4.asNullable(), - rightType = INT4, - expectedType = INT4.asNullable() - ), - singleArithmeticOpCases( - name = "two nullable operands", - leftType = INT4.asNullable(), - rightType = INT4.asNullable(), - expectedType = INT4.asNullable() - ), - singleArithmeticOpCases( - name = "int4, union(int4, float)", - leftType = INT4, - rightType = AnyOfType(setOf(INT4, FLOAT)), - expectedType = AnyOfType(setOf(INT4, FLOAT)) - ), - singleArithmeticOpCases( - name = "int4, union(int4, float)", - leftType = DECIMAL, - rightType = AnyOfType(setOf(INT4, FLOAT)), - expectedType = DECIMAL - ), - singleArithmeticOpCases( - name = "any, int", - leftType = ANY, - rightType = INT, - expectedType = unionOf( - MISSING, - NULL, - INT, - FLOAT, - DECIMAL - ) - ), - singleArithmeticOpCases( - name = "any, float", - leftType = ANY, - rightType = FLOAT, - expectedType = unionOf( - MISSING, - NULL, - FLOAT, - DECIMAL - ) - ), - singleArithmeticOpCases( - name = "any, decimal", - leftType = ANY, - rightType = DECIMAL, - expectedType = unionOf( - MISSING, - NULL, - DECIMAL - ) - ), - singleArithmeticOpCases( - name = "any, any", - leftType = ANY, - rightType = ANY, - expectedType = unionOf( - MISSING, - NULL, - INT, - INT2, - INT4, - INT8, - FLOAT, - DECIMAL + // Same numeric operand types, double binary operators + ALL_NUMERIC_TYPES.flatMap { numericType -> + doubleArithmeticOpCases( + name = "$numericType", + leftType = numericType, + middleType = numericType, + rightType = numericType, + expectedType = numericType ) - ), + } + + listOf( + // mixed operand types, single binary operators + singleArithmeticOpCases( + name = "int2 and int4 operands", + leftType = INT2, + rightType = INT4, + expectedType = INT4 + ), + singleArithmeticOpCases( + name = "int2 and int8 operands", + leftType = INT2, + rightType = INT8, + expectedType = INT8 + ), + singleArithmeticOpCases( + name = "int2 and int operands", + leftType = INT2, + rightType = INT, + expectedType = INT + ), + singleArithmeticOpCases( + name = "int4 and int8 operands", + leftType = INT4, + rightType = INT8, + expectedType = INT8 + ), + singleArithmeticOpCases( + name = "int4 and int operands", + leftType = INT4, + rightType = INT, + expectedType = INT + ), + singleArithmeticOpCases( + name = "int4 and any_of(int2, int4) operands", + leftType = INT4, + rightType = unionOf(INT2, INT4), + expectedType = INT4 + ), + singleArithmeticOpCases( + name = "int8 and any_of(int2, int4)", + leftType = INT8, + rightType = unionOf(INT2, INT4), + expectedType = INT8 + ), + singleArithmeticOpCases( + name = "int8 and any_of(int2, int4, float)", + leftType = INT8, + rightType = unionOf(INT2, INT4, FLOAT), + expectedType = unionOf(INT8, FLOAT) + ), + singleArithmeticOpCases( + name = "int8 and any_of(int8, int2, int4, float, decimal)", + leftType = INT8, + rightType = unionOf(INT2, INT4, FLOAT, DECIMAL), + expectedType = unionOf(INT8, FLOAT, DECIMAL) + ), + singleArithmeticOpCases( + name = "any_of(int8, decimal) and any_of(int2, int4, float)", + leftType = unionOf(INT8, DECIMAL), + rightType = unionOf(INT2, INT4, FLOAT), + expectedType = unionOf(INT8, FLOAT, DECIMAL) + ), + doubleArithmeticOpCases( + name = "int2, int4 and int8", + leftType = INT2, + middleType = INT4, + rightType = INT8, + expectedType = INT8 + ), - // NULL propagation, single binary operator - singleArithmeticOpCases( - name = "int4, union(null, float)", - leftType = INT4, - rightType = FLOAT.asNullable(), - expectedType = FLOAT.asNullable() - ), - - // NULL propagation, double binary operators - 
doubleArithmeticOpCases( - name = "one nullable operand, 1 of 3", - leftType = INT4.asNullable(), - middleType = INT4, - rightType = INT4, - expectedType = INT4.asNullable() - ), - doubleArithmeticOpCases( - name = "one nullable operand, 2 of 3", - leftType = INT4, - middleType = INT4.asNullable(), - rightType = INT4, - expectedType = INT4.asNullable() - ), - doubleArithmeticOpCases( - name = "one nullable operand, 3 of 3", - leftType = INT4, - middleType = INT4, - rightType = INT4.asNullable(), - expectedType = INT4.asNullable() - ), - doubleArithmeticOpCases( - name = "three nullable operands", - leftType = INT4.asNullable(), - middleType = INT4.asNullable(), - rightType = INT4.asNullable(), - expectedType = INT4.asNullable() - ), - - // MISSING propagation, single binary operators - singleArithmeticOpCases( - name = "one optional operand, 1 of 2", - leftType = INT4.asOptional(), - rightType = INT4, - expectedType = INT4.asOptional() - ), - singleArithmeticOpCases( - name = "one optional operand, 2 of 2", - leftType = INT4, - rightType = INT4.asOptional(), - expectedType = INT4.asOptional() - ), - singleArithmeticOpCases( - name = "two optional operands", - leftType = INT4.asOptional(), - rightType = INT4.asOptional(), - expectedType = INT4.asOptional() - ), - - // NULL propagation, double binary operators - doubleArithmeticOpCases( - name = "one optional operand, 1 of 3", - leftType = INT4.asOptional(), - middleType = INT4, - rightType = INT4, - expectedType = INT4.asOptional() - ), - doubleArithmeticOpCases( - name = "one optional operand, 2 of 3", - leftType = INT4, - middleType = INT4.asOptional(), - rightType = INT4, - expectedType = INT4.asOptional() - ), - doubleArithmeticOpCases( - name = "one optional operand, 3 of 3", - leftType = INT4, - middleType = INT4, - rightType = INT4.asOptional(), - expectedType = INT4.asOptional() - ), - doubleArithmeticOpCases( - name = "three optional operands", - leftType = INT4.asOptional(), - middleType = INT4.asOptional(), - rightType = INT4.asOptional(), - expectedType = INT4.asOptional() - ), - doubleArithmeticOpCases( - name = "int4, float, int4", - leftType = INT4, - middleType = FLOAT, - rightType = INT4, - expectedType = FLOAT - ), - doubleArithmeticOpCases( - name = "float, decimal, int4", - leftType = FLOAT, - middleType = DECIMAL, - rightType = INT4, - expectedType = DECIMAL - ), - doubleArithmeticOpCases( - name = "nullable and optional", - leftType = INT4, - middleType = INT4.asNullable(), - rightType = INT4.asOptional(), - expectedType = INT4.asOptional().asNullable() - ), + // mixed operand types, double binary operators + doubleArithmeticOpCases( + name = "int8, int4 and int2", + leftType = INT8, + middleType = INT4, + rightType = INT2, + expectedType = INT8 + ), + doubleArithmeticOpCases( + name = "any_of(int8, decimal) and int4", + leftType = unionOf(INT8, DECIMAL), + middleType = INT4, + rightType = INT2, + expectedType = unionOf(INT8, DECIMAL) + ), + doubleArithmeticOpCases( + name = "any_of(int8, decimal), any_of(int4, float, missing) and any_of(int2, decimal)", + leftType = unionOf(INT8, DECIMAL), + middleType = unionOf(INT4, FLOAT, MISSING), + rightType = unionOf(INT2, DECIMAL), + expectedType = unionOf( + MISSING, + INT8, + FLOAT, + DECIMAL + ) + ), - // - // data type mismatch cases for arithmetic ops below - // - OpType.ARITHMETIC.operators.flatMap { op -> - // non-numeric, non-unknown with non-unknown -> data type mismatch error - generateAllUniquePairs(ALL_NON_NUMERIC_NON_UNKNOWN_TYPES, ALL_NON_UNKNOWN_TYPES).map { - 
singleNAryOpErrorTestCase( - name = "data type mismatch - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = op) - ) + // NULL propagation, single binary operators + singleArithmeticOpCases( + name = "one nullable operand", + leftType = INT4.asNullable(), + rightType = INT4, + expectedType = INT4.asNullable() + ), + singleArithmeticOpCases( + name = "two nullable operands", + leftType = INT4.asNullable(), + rightType = INT4.asNullable(), + expectedType = INT4.asNullable() + ), + singleArithmeticOpCases( + name = "int4, union(int4, float)", + leftType = INT4, + rightType = AnyOfType(setOf(INT4, FLOAT)), + expectedType = AnyOfType(setOf(INT4, FLOAT)) + ), + singleArithmeticOpCases( + name = "int4, union(int4, float)", + leftType = DECIMAL, + rightType = AnyOfType(setOf(INT4, FLOAT)), + expectedType = DECIMAL + ), + singleArithmeticOpCases( + name = "any, int", + leftType = ANY, + rightType = INT, + expectedType = unionOf( + MISSING, + NULL, + INT, + FLOAT, + DECIMAL ) - } + - // non-numeric, non-unknown with an unknown -> data type mismatch and null or missing error - generateAllUniquePairs(ALL_NON_NUMERIC_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = op), - createReturnsNullOrMissingError(col = 3, nAryOp = op) - ) + ), + singleArithmeticOpCases( + name = "any, float", + leftType = ANY, + rightType = FLOAT, + expectedType = unionOf( + MISSING, + NULL, + FLOAT, + DECIMAL ) - } + - // numeric with an unknown -> null or missing error - generateAllUniquePairs(ALL_NUMERIC_TYPES, ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ), + singleArithmeticOpCases( + name = "any, decimal", + leftType = ANY, + rightType = DECIMAL, + expectedType = unionOf( + MISSING, + NULL, + DECIMAL ) - } + - // unknown with an unknown -> null or missing error - generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ), + singleArithmeticOpCases( + name = "any, any", + leftType = ANY, + rightType = ANY, + expectedType = unionOf( + MISSING, + NULL, + INT, + INT2, + INT4, + INT8, + FLOAT, + DECIMAL ) - } + - listOf( - // double arithmetic ops with unknowns -> null or missing errors - doubleOpErrorCases( - name = "null, null, null", - op = op, - leftType = NULL, - middleType = NULL, - rightType = NULL, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( - name = "null, null, missing", - op = op, - leftType = NULL, - middleType = NULL, - rightType = MISSING, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( 
- name = "missing, null, null", - op = op, - leftType = MISSING, - middleType = NULL, - rightType = NULL, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( - name = "null, missing, null", - op = op, - leftType = NULL, - middleType = MISSING, - rightType = NULL, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( - name = "missing, missing, null", - op = op, - leftType = MISSING, - middleType = MISSING, - rightType = NULL, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( - name = "null, missing, missing", - op = op, - leftType = NULL, - middleType = MISSING, - rightType = MISSING, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( - name = "missing, null, missing", - op = op, - leftType = MISSING, - middleType = NULL, - rightType = MISSING, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) - ) - ), - doubleOpErrorCases( - name = "missing, missing, missing", - op = op, - leftType = MISSING, - middleType = MISSING, - rightType = MISSING, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 3, nAryOp = op), - createReturnsNullOrMissingError(col = 7, nAryOp = op) + ), + + // NULL propagation, single binary operator + singleArithmeticOpCases( + name = "int4, union(null, float)", + leftType = INT4, + rightType = FLOAT.asNullable(), + expectedType = FLOAT.asNullable() + ), + + // NULL propagation, double binary operators + doubleArithmeticOpCases( + name = "one nullable operand, 1 of 3", + leftType = INT4.asNullable(), + middleType = INT4, + rightType = INT4, + expectedType = INT4.asNullable() + ), + doubleArithmeticOpCases( + name = "one nullable operand, 2 of 3", + leftType = INT4, + middleType = INT4.asNullable(), + rightType = INT4, + expectedType = INT4.asNullable() + ), + doubleArithmeticOpCases( + name = "one nullable operand, 3 of 3", + leftType = INT4, + middleType = INT4, + rightType = INT4.asNullable(), + expectedType = INT4.asNullable() + ), + doubleArithmeticOpCases( + name = "three nullable operands", + leftType = INT4.asNullable(), + middleType = INT4.asNullable(), + rightType = INT4.asNullable(), + expectedType = INT4.asNullable() + ), + + // MISSING propagation, single binary operators + singleArithmeticOpCases( + name = "one optional operand, 1 of 2", + leftType = INT4.asOptional(), + rightType = INT4, + expectedType = INT4.asOptional() + ), + singleArithmeticOpCases( + name = "one optional operand, 2 of 2", + leftType = INT4, + rightType = INT4.asOptional(), + expectedType = INT4.asOptional() + ), + singleArithmeticOpCases( + name = "two optional operands", + leftType = INT4.asOptional(), + rightType = INT4.asOptional(), + expectedType = INT4.asOptional() + ), + + // NULL propagation, double binary operators + doubleArithmeticOpCases( + name = "one optional operand, 1 of 3", + leftType = INT4.asOptional(), + middleType = INT4, + rightType = INT4, + expectedType = INT4.asOptional() + ), + doubleArithmeticOpCases( + name = "one optional operand, 2 of 3", + leftType = INT4, + middleType = 
INT4.asOptional(), + rightType = INT4, + expectedType = INT4.asOptional() + ), + doubleArithmeticOpCases( + name = "one optional operand, 3 of 3", + leftType = INT4, + middleType = INT4, + rightType = INT4.asOptional(), + expectedType = INT4.asOptional() + ), + doubleArithmeticOpCases( + name = "three optional operands", + leftType = INT4.asOptional(), + middleType = INT4.asOptional(), + rightType = INT4.asOptional(), + expectedType = INT4.asOptional() + ), + doubleArithmeticOpCases( + name = "int4, float, int4", + leftType = INT4, + middleType = FLOAT, + rightType = INT4, + expectedType = FLOAT + ), + doubleArithmeticOpCases( + name = "float, decimal, int4", + leftType = FLOAT, + middleType = DECIMAL, + rightType = INT4, + expectedType = DECIMAL + ), + doubleArithmeticOpCases( + name = "nullable and optional", + leftType = INT4, + middleType = INT4.asNullable(), + rightType = INT4.asOptional(), + expectedType = INT4.asOptional().asNullable() + ), + + // + // data type mismatch cases for arithmetic ops below + // + OpType.ARITHMETIC.operators.flatMap { op -> + // non-numeric, non-unknown with non-unknown -> data type mismatch error + generateAllUniquePairs(ALL_NON_NUMERIC_NON_UNKNOWN_TYPES, ALL_NON_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "data type mismatch - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = op) + ) ) - ) - ) + - // other test cases resulting in a data type mismatch - listOf( - Pair(unionOf(STRING, SYMBOL), SYMBOL), - Pair(unionOf(STRING, SYMBOL), unionOf(STRING, SYMBOL)), - Pair(STRING.asNullable(), INT4), - Pair(STRING.asOptional(), INT4), - Pair(STRING.asNullable().asOptional(), INT4), - Pair(ANY, STRING) - ).flatMap { - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second - ) + } + + // non-numeric, non-unknown with an unknown -> data type mismatch and null or missing error + generateAllUniquePairs(ALL_NON_NUMERIC_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = op), + createReturnsNullOrMissingError(col = 3, nAryOp = op) + ) + ) + } + + // numeric with an unknown -> null or missing error + generateAllUniquePairs(ALL_NUMERIC_TYPES, ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + } + + // unknown with an unknown -> null or missing error + generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + } + + listOf( + // double arithmetic ops with unknowns -> null or missing errors + doubleOpErrorCases( + name = "null, null, null", + op = op, + leftType = NULL, + middleType = NULL, + rightType = NULL, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, 
nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "null, null, missing", + op = op, + leftType = NULL, + middleType = NULL, + rightType = MISSING, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "missing, null, null", + op = op, + leftType = MISSING, + middleType = NULL, + rightType = NULL, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "null, missing, null", + op = op, + leftType = NULL, + middleType = MISSING, + rightType = NULL, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "missing, missing, null", + op = op, + leftType = MISSING, + middleType = MISSING, + rightType = NULL, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "null, missing, missing", + op = op, + leftType = NULL, + middleType = MISSING, + rightType = MISSING, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "missing, null, missing", + op = op, + leftType = MISSING, + middleType = NULL, + rightType = MISSING, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ), + doubleOpErrorCases( + name = "missing, missing, missing", + op = op, + leftType = MISSING, + middleType = MISSING, + rightType = MISSING, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 3, nAryOp = op), + createReturnsNullOrMissingError(col = 7, nAryOp = op) + ) + ) + ) + + // other test cases resulting in a data type mismatch + listOf( + Pair(unionOf(STRING, SYMBOL), SYMBOL), + Pair(unionOf(STRING, SYMBOL), unionOf(STRING, SYMBOL)), + Pair(STRING.asNullable(), INT4), + Pair(STRING.asOptional(), INT4), + Pair(STRING.asNullable().asOptional(), INT4), + Pair(ANY, STRING) + ).flatMap { + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second + ) + } } - } - ).flatten() + ).flatten() /** * Creates a test case for each unary arithmetic operand (+, -) of the form `{unary op} x` with [argType] @@ -974,89 +977,89 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { expectedOutputType = numericType ) } + - createUnaryArithmeticOpCases( - name = "unary op - ANY", - argType = ANY, - expectedOutputType = unionOf( - NULL, - MISSING, - FLOAT, - INT2, - INT4, - INT8, - INT, - DECIMAL, - FLOAT - ) - ), + createUnaryArithmeticOpCases( + name = "unary op - ANY", + argType = ANY, + expectedOutputType = unionOf( + NULL, + MISSING, + FLOAT, + INT2, + INT4, + INT8, + INT, + DECIMAL, + FLOAT + ) + ), createUnaryArithmeticOpCases( name = "unary op - union(INT, STRING)", argType = unionOf(INT, STRING), expectedOutputType = unionOf(INT, MISSING) ) ).flatten() + - // - // data type mismatch cases below this line - // - listOf("+", "-").flatMap { op -> - // unknown -> expression always returns null or missing error - 
ALL_UNKNOWN_TYPES.map { unknownType -> - createUnaryOpErrorCase( - name = "unary op with unknown op error - $unknownType", - op = op, - argType = unknownType, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 1, nAryOp = op) - ) - ) - } + - // incompatible types for unary arithmetic -> data type mismatch - ALL_NON_NUMERIC_NON_UNKNOWN_TYPES.map { nonNumericType -> - createUnaryOpErrorCase( - name = "unary op with data type mismatch - $nonNumericType", - op = op, - argType = nonNumericType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(nonNumericType), nAryOp = op) + // + // data type mismatch cases below this line + // + listOf("+", "-").flatMap { op -> + // unknown -> expression always returns null or missing error + ALL_UNKNOWN_TYPES.map { unknownType -> + createUnaryOpErrorCase( + name = "unary op with unknown op error - $unknownType", + op = op, + argType = unknownType, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 1, nAryOp = op) + ) ) - ) - } + - listOf( - // other unary arithmetic tests - createUnaryOpErrorCase( - name = "unary op with data type mismatch - union(STRING, SYMBOL)", - op = op, - argType = unionOf(STRING, SYMBOL), - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(STRING, SYMBOL)), nAryOp = op) - ) - ), - createUnaryOpErrorCase( - name = "unary op with data type mismatch - nullable string", - op = op, - argType = STRING.asNullable(), - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(STRING.asNullable()), nAryOp = op) - ) - ), - createUnaryOpErrorCase( - name = "unary op with data type mismatch - optional string", - op = op, - argType = STRING.asOptional(), - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(STRING.asOptional()), nAryOp = op) - ) - ), - createUnaryOpErrorCase( - name = "unary op with data type mismatch - nullable, optional string", - op = op, - argType = STRING.asNullable().asOptional(), - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(STRING.asNullable().asOptional()), nAryOp = op) + } + + // incompatible types for unary arithmetic -> data type mismatch + ALL_NON_NUMERIC_NON_UNKNOWN_TYPES.map { nonNumericType -> + createUnaryOpErrorCase( + name = "unary op with data type mismatch - $nonNumericType", + op = op, + argType = nonNumericType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(nonNumericType), nAryOp = op) + ) + ) + } + + listOf( + // other unary arithmetic tests + createUnaryOpErrorCase( + name = "unary op with data type mismatch - union(STRING, SYMBOL)", + op = op, + argType = unionOf(STRING, SYMBOL), + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(STRING, SYMBOL)), nAryOp = op) + ) + ), + createUnaryOpErrorCase( + name = "unary op with data type mismatch - nullable string", + op = op, + argType = STRING.asNullable(), + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(STRING.asNullable()), nAryOp = op) + ) + ), + createUnaryOpErrorCase( + name = "unary op with data type mismatch - optional string", + op = op, + argType = STRING.asOptional(), + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(STRING.asOptional()), nAryOp = op) + ) + ), + createUnaryOpErrorCase( + name = "unary op with data type mismatch - nullable, optional string", + op = op, + argType = 
STRING.asNullable().asOptional(), + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(STRING.asNullable().asOptional()), nAryOp = op) + ) + ) ) - ) - ) - } + } /** * Creates two test cases with the specified operand and expected types for every NAry comparison and equality @@ -1083,13 +1086,13 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { rightType, expectQueryOutputType(expectedComparisonType, expectedWarnings) ) + - createSingleNAryOpCasesWithSwappedArgs( - OpType.EQUALITY, - name, - leftType, - rightType, - expectQueryOutputType(expectedEqualityType, expectedWarnings) - ) + createSingleNAryOpCasesWithSwappedArgs( + OpType.EQUALITY, + name, + leftType, + rightType, + expectQueryOutputType(expectedEqualityType, expectedWarnings) + ) @JvmStatic @Suppress("unused") @@ -1103,332 +1106,332 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { expectedComparisonType = BOOL ) } + - // text {comparison/equality op} text -> bool - generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).flatMap { - singleNAryComparisonAndEqualityCases( - name = "${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedComparisonType = BOOL - ) - } + - // lob {comparison/equality op} lob -> bool - generateAllUniquePairs(ALL_LOB_TYPES, ALL_LOB_TYPES).flatMap { - singleNAryComparisonAndEqualityCases( - name = "${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedComparisonType = BOOL - ) - } + - listOf( - singleNAryComparisonAndEqualityCases( - name = "bool, bool", - leftType = BOOL, - rightType = BOOL, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "timestamp, timestamp", - leftType = TIMESTAMP, - rightType = TIMESTAMP, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list, list", - leftType = LIST, - rightType = LIST, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "sexp, sexp", - leftType = SEXP, - rightType = SEXP, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "bag, bag", - leftType = BAG, - rightType = BAG, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "struct, struct", - leftType = STRUCT, - rightType = STRUCT, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "int4, union(string, float); equality gives bool", - leftType = INT4, - rightType = unionOf(STRING, FLOAT), - expectedComparisonType = unionOf(MISSING, BOOL), - expectedEqualityType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "int4, union(null, float)", - leftType = INT4, - rightType = unionOf(NULL, FLOAT), - expectedComparisonType = unionOf(NULL, BOOL) - ), - singleNAryComparisonAndEqualityCases( - name = "int4, union(missing, float)", - leftType = INT4, - rightType = unionOf(MISSING, FLOAT), - expectedComparisonType = unionOf(MISSING, BOOL) - ), - singleNAryComparisonAndEqualityCases( - name = "int4, any", - leftType = INT4, - rightType = ANY, - expectedComparisonType = unionOf(MISSING, NULL, BOOL) - ), - singleNAryComparisonAndEqualityCases( - name = "union(int4, float), union(int4, string); equality gives bool", - leftType = unionOf(INT4, FLOAT), - rightType = unionOf(INT4, STRING), - expectedComparisonType = unionOf(MISSING, BOOL), - expectedEqualityType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "union(int4, decimal), union(int4, 
float)", - leftType = unionOf(INT4, DECIMAL), - rightType = unionOf(INT4, FLOAT), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "union(int4, string), union(int4, string); equality gives bool", - leftType = unionOf(INT4, STRING), - rightType = unionOf(INT4, STRING), - expectedComparisonType = unionOf(MISSING, BOOL), - expectedEqualityType = BOOL - ), - // Collections with different, comparable element types - singleNAryComparisonAndEqualityCases( - name = "list(int), list(decimal)", - leftType = ListType(INT), - rightType = ListType(DECIMAL), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list(int), list(null)", - leftType = ListType(INT), - rightType = ListType(NULL), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list(int), list(missing)", - leftType = ListType(INT), - rightType = ListType(MISSING), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list(int), list(unionOf(int, decimal))", - leftType = ListType(INT), - rightType = ListType(unionOf(INT, DECIMAL)), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list(int), list(unionOf(int, timestamp))", - leftType = ListType(INT), - rightType = ListType(unionOf(INT, TIMESTAMP)), - expectedComparisonType = BOOL - ), - // Collections with different, incomparable element types doesn't give any error/warning. Further - // container comparability checks deferred to later https://github.com/partiql/partiql-lang-kotlin/issues/505 - singleNAryComparisonAndEqualityCases( - name = "list(int), list(timestamp)", - leftType = ListType(INT), - rightType = ListType(TIMESTAMP), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "sexp(int), sexp(timestamp)", - leftType = SexpType(INT), - rightType = SexpType(TIMESTAMP), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "bag(int), bag(timestamp)", - leftType = BagType(INT), - rightType = BagType(TIMESTAMP), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list(int), list(unionOf(timestamp, bool))", - leftType = ListType(INT), - rightType = ListType(unionOf(TIMESTAMP, BOOL)), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "list(list(int)), list(list(timestamp)) - nested incompatible lists", - leftType = ListType(ListType(INT)), - rightType = ListType(ListType(TIMESTAMP)), - expectedComparisonType = BOOL - ), - // structs with comparable fields - singleNAryComparisonAndEqualityCases( - name = "struct(a to int), struct(a to decimal)", - leftType = StructType(mapOf("a" to INT)), - rightType = StructType(mapOf("a" to DECIMAL)), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "struct(a to int, b to string), struct(a to decimal, b to symbol) - multiple, comparable fields", - leftType = StructType(mapOf("a" to INT, "b" to STRING)), - rightType = StructType(mapOf("a" to DECIMAL, "b" to SYMBOL)), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "struct(a to int), struct(a to missing)", - leftType = StructType(mapOf("a" to INT)), - rightType = StructType(mapOf("a" to MISSING)), - expectedComparisonType = BOOL - ), - // structs with different numbers of fields. 
Further container comparability checks deferred to later - // https://github.com/partiql/partiql-lang-kotlin/issues/505 - singleNAryComparisonAndEqualityCases( - name = "struct(a to int), struct()", - leftType = StructType(mapOf("a" to INT)), - rightType = STRUCT, - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "struct(a to int), struct(a to decimal, b to float)", - leftType = StructType(mapOf("a" to INT)), - rightType = StructType(mapOf("a" to DECIMAL, "b" to FLOAT)), - expectedComparisonType = BOOL - ), - // structs with incomparable fields. Further container comparability checks deferred to later - // https://github.com/partiql/partiql-lang-kotlin/issues/505 - singleNAryComparisonAndEqualityCases( - name = "struct(a to int), struct(a to timestamp)", - leftType = StructType(mapOf("a" to INT)), - rightType = StructType(mapOf("a" to TIMESTAMP)), - expectedComparisonType = BOOL - ), - singleNAryComparisonAndEqualityCases( - name = "struct(a to int, b to symbol), struct(a to timestamp, b to timestamp) - multiple incomparable", - leftType = StructType(mapOf("a" to INT, "b" to SYMBOL)), - rightType = StructType(mapOf("a" to TIMESTAMP, "b" to TIMESTAMP)), - expectedComparisonType = BOOL - ), - // struct with different number of fields an incomparable field - singleNAryComparisonAndEqualityCases( - name = "struct(a to int), struct(a to timestamp, b to timestamp)", - leftType = StructType(mapOf("a" to INT)), - rightType = StructType(mapOf("a" to TIMESTAMP, "b" to TIMESTAMP)), - expectedComparisonType = BOOL - ), - ).flatten() + - (OpType.COMPARISON.operators + OpType.EQUALITY.operators).flatMap { op -> - // comparing numeric type with non-numeric, non-unknown type -> data type mismatch - ALL_NUMERIC_TYPES.flatMap { numericType -> - ALL_NON_NUMERIC_NON_UNKNOWN_TYPES.map { nonNumericType -> - singleNAryOpErrorTestCase( - name = "data type mismatch - $numericType, $nonNumericType", - op = op, - leftType = numericType, - rightType = nonNumericType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(numericType, nonNumericType), nAryOp = op) - ) - ) - } + // text {comparison/equality op} text -> bool + generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).flatMap { + singleNAryComparisonAndEqualityCases( + name = "${it.first}, ${it.second}", + leftType = it.first, + rightType = it.second, + expectedComparisonType = BOOL + ) } + - // comparing text type with non-text, non-unknown type -> data type mismatch - ALL_TEXT_TYPES.flatMap { textType -> - ALL_NON_TEXT_NON_UNKNOWN_TYPES.map { nonTextType -> - singleNAryOpErrorTestCase( - name = "data type mismatch - $textType, $nonTextType", - op = op, - leftType = textType, - rightType = nonTextType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(textType, nonTextType), nAryOp = op) - ) - ) - } + // lob {comparison/equality op} lob -> bool + generateAllUniquePairs(ALL_LOB_TYPES, ALL_LOB_TYPES).flatMap { + singleNAryComparisonAndEqualityCases( + name = "${it.first}, ${it.second}", + leftType = it.first, + rightType = it.second, + expectedComparisonType = BOOL + ) } + - // comparing lob type with non-lob, non-unknown type -> data type mismatch - ALL_LOB_TYPES.flatMap { lobType -> - ALL_NON_LOB_NON_UNKNOWN_TYPES.map { nonLobType -> + listOf( + singleNAryComparisonAndEqualityCases( + name = "bool, bool", + leftType = BOOL, + rightType = BOOL, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "timestamp, timestamp", + 
leftType = TIMESTAMP, + rightType = TIMESTAMP, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list, list", + leftType = LIST, + rightType = LIST, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "sexp, sexp", + leftType = SEXP, + rightType = SEXP, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "bag, bag", + leftType = BAG, + rightType = BAG, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "struct, struct", + leftType = STRUCT, + rightType = STRUCT, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "int4, union(string, float); equality gives bool", + leftType = INT4, + rightType = unionOf(STRING, FLOAT), + expectedComparisonType = unionOf(MISSING, BOOL), + expectedEqualityType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "int4, union(null, float)", + leftType = INT4, + rightType = unionOf(NULL, FLOAT), + expectedComparisonType = unionOf(NULL, BOOL) + ), + singleNAryComparisonAndEqualityCases( + name = "int4, union(missing, float)", + leftType = INT4, + rightType = unionOf(MISSING, FLOAT), + expectedComparisonType = unionOf(MISSING, BOOL) + ), + singleNAryComparisonAndEqualityCases( + name = "int4, any", + leftType = INT4, + rightType = ANY, + expectedComparisonType = unionOf(MISSING, NULL, BOOL) + ), + singleNAryComparisonAndEqualityCases( + name = "union(int4, float), union(int4, string); equality gives bool", + leftType = unionOf(INT4, FLOAT), + rightType = unionOf(INT4, STRING), + expectedComparisonType = unionOf(MISSING, BOOL), + expectedEqualityType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "union(int4, decimal), union(int4, float)", + leftType = unionOf(INT4, DECIMAL), + rightType = unionOf(INT4, FLOAT), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "union(int4, string), union(int4, string); equality gives bool", + leftType = unionOf(INT4, STRING), + rightType = unionOf(INT4, STRING), + expectedComparisonType = unionOf(MISSING, BOOL), + expectedEqualityType = BOOL + ), + // Collections with different, comparable element types + singleNAryComparisonAndEqualityCases( + name = "list(int), list(decimal)", + leftType = ListType(INT), + rightType = ListType(DECIMAL), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list(int), list(null)", + leftType = ListType(INT), + rightType = ListType(NULL), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list(int), list(missing)", + leftType = ListType(INT), + rightType = ListType(MISSING), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list(int), list(unionOf(int, decimal))", + leftType = ListType(INT), + rightType = ListType(unionOf(INT, DECIMAL)), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list(int), list(unionOf(int, timestamp))", + leftType = ListType(INT), + rightType = ListType(unionOf(INT, TIMESTAMP)), + expectedComparisonType = BOOL + ), + // Collections with different, incomparable element types doesn't give any error/warning. 
Further + // container comparability checks deferred to later https://github.com/partiql/partiql-lang-kotlin/issues/505 + singleNAryComparisonAndEqualityCases( + name = "list(int), list(timestamp)", + leftType = ListType(INT), + rightType = ListType(TIMESTAMP), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "sexp(int), sexp(timestamp)", + leftType = SexpType(INT), + rightType = SexpType(TIMESTAMP), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "bag(int), bag(timestamp)", + leftType = BagType(INT), + rightType = BagType(TIMESTAMP), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list(int), list(unionOf(timestamp, bool))", + leftType = ListType(INT), + rightType = ListType(unionOf(TIMESTAMP, BOOL)), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "list(list(int)), list(list(timestamp)) - nested incompatible lists", + leftType = ListType(ListType(INT)), + rightType = ListType(ListType(TIMESTAMP)), + expectedComparisonType = BOOL + ), + // structs with comparable fields + singleNAryComparisonAndEqualityCases( + name = "struct(a to int), struct(a to decimal)", + leftType = StructType(mapOf("a" to INT)), + rightType = StructType(mapOf("a" to DECIMAL)), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "struct(a to int, b to string), struct(a to decimal, b to symbol) - multiple, comparable fields", + leftType = StructType(mapOf("a" to INT, "b" to STRING)), + rightType = StructType(mapOf("a" to DECIMAL, "b" to SYMBOL)), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "struct(a to int), struct(a to missing)", + leftType = StructType(mapOf("a" to INT)), + rightType = StructType(mapOf("a" to MISSING)), + expectedComparisonType = BOOL + ), + // structs with different numbers of fields. Further container comparability checks deferred to later + // https://github.com/partiql/partiql-lang-kotlin/issues/505 + singleNAryComparisonAndEqualityCases( + name = "struct(a to int), struct()", + leftType = StructType(mapOf("a" to INT)), + rightType = STRUCT, + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "struct(a to int), struct(a to decimal, b to float)", + leftType = StructType(mapOf("a" to INT)), + rightType = StructType(mapOf("a" to DECIMAL, "b" to FLOAT)), + expectedComparisonType = BOOL + ), + // structs with incomparable fields. 
Further container comparability checks deferred to later + // https://github.com/partiql/partiql-lang-kotlin/issues/505 + singleNAryComparisonAndEqualityCases( + name = "struct(a to int), struct(a to timestamp)", + leftType = StructType(mapOf("a" to INT)), + rightType = StructType(mapOf("a" to TIMESTAMP)), + expectedComparisonType = BOOL + ), + singleNAryComparisonAndEqualityCases( + name = "struct(a to int, b to symbol), struct(a to timestamp, b to timestamp) - multiple incomparable", + leftType = StructType(mapOf("a" to INT, "b" to SYMBOL)), + rightType = StructType(mapOf("a" to TIMESTAMP, "b" to TIMESTAMP)), + expectedComparisonType = BOOL + ), + // struct with different number of fields an incomparable field + singleNAryComparisonAndEqualityCases( + name = "struct(a to int), struct(a to timestamp, b to timestamp)", + leftType = StructType(mapOf("a" to INT)), + rightType = StructType(mapOf("a" to TIMESTAMP, "b" to TIMESTAMP)), + expectedComparisonType = BOOL + ), + ).flatten() + + (OpType.COMPARISON.operators + OpType.EQUALITY.operators).flatMap { op -> + // comparing numeric type with non-numeric, non-unknown type -> data type mismatch + ALL_NUMERIC_TYPES.flatMap { numericType -> + ALL_NON_NUMERIC_NON_UNKNOWN_TYPES.map { nonNumericType -> + singleNAryOpErrorTestCase( + name = "data type mismatch - $numericType, $nonNumericType", + op = op, + leftType = numericType, + rightType = nonNumericType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(numericType, nonNumericType), nAryOp = op) + ) + ) + } + } + + // comparing text type with non-text, non-unknown type -> data type mismatch + ALL_TEXT_TYPES.flatMap { textType -> + ALL_NON_TEXT_NON_UNKNOWN_TYPES.map { nonTextType -> + singleNAryOpErrorTestCase( + name = "data type mismatch - $textType, $nonTextType", + op = op, + leftType = textType, + rightType = nonTextType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(textType, nonTextType), nAryOp = op) + ) + ) + } + } + + // comparing lob type with non-lob, non-unknown type -> data type mismatch + ALL_LOB_TYPES.flatMap { lobType -> + ALL_NON_LOB_NON_UNKNOWN_TYPES.map { nonLobType -> + singleNAryOpErrorTestCase( + name = "data type mismatch - $lobType, $nonLobType", + op = op, + leftType = lobType, + rightType = nonLobType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(lobType, nonLobType), nAryOp = op) + ) + ) + } + } + + // comparing non-categorized types with non-unknown other type -> data type mismatch + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { otherType -> + ALL_NON_UNKNOWN_TYPES.filter { it != otherType }.map { nonCompatibleType -> + singleNAryOpErrorTestCase( + name = "data type mismatch - $otherType, $nonCompatibleType", + op = op, + leftType = otherType, + rightType = nonCompatibleType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(otherType, nonCompatibleType), nAryOp = op) + ) + ) + } + } + + // any type compared with an unknown -> null or missing error + generateAllUniquePairs(ALL_TYPES, ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + } + + // other unknown error tests singleNAryOpErrorTestCase( - name = "data type mismatch - $lobType, $nonLobType", + name = "missing, union(null, float)", op = op, - leftType = 
lobType, - rightType = nonLobType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(lobType, nonLobType), nAryOp = op) - ) - ) - } - } + - // comparing non-categorized types with non-unknown other type -> data type mismatch - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { otherType -> - ALL_NON_UNKNOWN_TYPES.filter { it != otherType }.map { nonCompatibleType -> + leftType = MISSING, + rightType = unionOf(NULL, FLOAT), + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + singleNAryOpErrorTestCase( - name = "data type mismatch - $otherType, $nonCompatibleType", + name = "union(null, missing), any", op = op, - leftType = otherType, - rightType = nonCompatibleType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(otherType, nonCompatibleType), nAryOp = op) - ) + leftType = NULL_OR_MISSING, + rightType = ANY, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + + // other miscellaneous tests + singleNAryOpMismatchWithSwappedCases( + name = "int, union(timestamp, null)", + op = op, + leftType = INT, + rightType = unionOf(TIMESTAMP, NULL) + ) + + singleNAryOpMismatchWithSwappedCases( + name = "int, union(timestamp, missing)", + op = op, + leftType = INT, + rightType = unionOf(TIMESTAMP, MISSING) + ) + + singleNAryOpMismatchWithSwappedCases( + name = "union(int missing), union(timestamp, missing)", + op = op, + leftType = unionOf(INT, MISSING), + rightType = unionOf(TIMESTAMP, MISSING) + ) + + singleNAryOpMismatchWithSwappedCases( + name = "union(int, decimal, float), union(string, symbol)", + op = op, + leftType = unionOf(INT, DECIMAL, FLOAT), + rightType = unionOf(STRING, SYMBOL) ) - } - } + - // any type compared with an unknown -> null or missing error - generateAllUniquePairs(ALL_TYPES, ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) - ) - } + - // other unknown error tests - singleNAryOpErrorTestCase( - name = "missing, union(null, float)", - op = op, - leftType = MISSING, - rightType = unionOf(NULL, FLOAT), - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) - ) + - singleNAryOpErrorTestCase( - name = "union(null, missing), any", - op = op, - leftType = NULL_OR_MISSING, - rightType = ANY, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) - ) + - // other miscellaneous tests - singleNAryOpMismatchWithSwappedCases( - name = "int, union(timestamp, null)", - op = op, - leftType = INT, - rightType = unionOf(TIMESTAMP, NULL) - ) + - singleNAryOpMismatchWithSwappedCases( - name = "int, union(timestamp, missing)", - op = op, - leftType = INT, - rightType = unionOf(TIMESTAMP, MISSING) - ) + - singleNAryOpMismatchWithSwappedCases( - name = "union(int missing), union(timestamp, missing)", - op = op, - leftType = unionOf(INT, MISSING), - rightType = unionOf(TIMESTAMP, MISSING) - ) + - singleNAryOpMismatchWithSwappedCases( - name = "union(int, decimal, float), union(string, symbol)", - op = op, - leftType = unionOf(INT, DECIMAL, FLOAT), - rightType = unionOf(STRING, SYMBOL) - ) - } + } /** * Creates two test cases with the specified operand and expected types for every NAry logical @@ -1563,24 +1566,24 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ) ) ) + - // 
`NOT` non-bool -> data type mismatch - ALL_NON_BOOL_NON_UNKNOWN_TYPES.map { nonBoolType -> - createNotDataTypeMismatchTestCase( - name = "NAry op NOT data type mismatch - $nonBoolType", - argType = nonBoolType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(nonBoolType), nAryOp = "NOT") + // `NOT` non-bool -> data type mismatch + ALL_NON_BOOL_NON_UNKNOWN_TYPES.map { nonBoolType -> + createNotDataTypeMismatchTestCase( + name = "NAry op NOT data type mismatch - $nonBoolType", + argType = nonBoolType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(nonBoolType), nAryOp = "NOT") + ) ) - ) - } + - // `NOT` unknown -> , null or missing error - ALL_UNKNOWN_TYPES.map { unknownType -> - createNotDataTypeMismatchTestCase( - name = "NAry op NOT null or missing error - $unknownType", - argType = unknownType, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NOT")) - ) - } + listOf( + } + + // `NOT` unknown -> , null or missing error + ALL_UNKNOWN_TYPES.map { unknownType -> + createNotDataTypeMismatchTestCase( + name = "NAry op NOT null or missing error - $unknownType", + argType = unknownType, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NOT")) + ) + } + listOf( // // `AND` + `OR` successful cases below this line // @@ -1625,39 +1628,39 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ) ) } + - // non-unknown, non-boolean with unknown -> data type mismatch and null or missing error - generateAllUniquePairs(ALL_NON_BOOL_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = op), - createReturnsNullOrMissingError(col = 3, nAryOp = op) + // non-unknown, non-boolean with unknown -> data type mismatch and null or missing error + generateAllUniquePairs(ALL_NON_BOOL_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = op), + createReturnsNullOrMissingError(col = 3, nAryOp = op) + ) ) - ) - } + - // bool with an unknown -> null or missing error - generateAllUniquePairs(listOf(BOOL), ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) - ) - } + - // unknown with an unknown -> null or missing error - generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - singleNAryOpErrorTestCase( - name = "null or missing error - ${it.first}, ${it.second}", - op = op, - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) - ) - } + listOf( + } + + // bool with an unknown -> null or missing error + generateAllUniquePairs(listOf(BOOL), ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = 
listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + } + + // unknown with an unknown -> null or missing error + generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + singleNAryOpErrorTestCase( + name = "null or missing error - ${it.first}, ${it.second}", + op = op, + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = op)) + ) + } + listOf( singleNAryOpErrorTestCase( "data type mismatch - union(int, string), bool", op = op, @@ -1679,75 +1682,75 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ) ) } + - // double logical op with at least one non-unknown, non-boolean -> data type mismatch - listOf("AND", "OR ").flatMap { op -> - ALL_NON_BOOL_NON_UNKNOWN_TYPES.flatMap { nonBoolType -> - listOf( - doubleOpErrorCases( - name = "data type mismatch - $nonBoolType, bool, bool", - op = op, - leftType = nonBoolType, - middleType = BOOL, - rightType = BOOL, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(nonBoolType, BOOL), nAryOp = op.trim()) - ) - ), - doubleOpErrorCases( - name = "data type mismatch - bool, $nonBoolType, bool", - op = op, - leftType = BOOL, - middleType = nonBoolType, - rightType = BOOL, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) - ) - ), - doubleOpErrorCases( - name = "data type mismatch - bool, bool, $nonBoolType", - op = op, - leftType = BOOL, - middleType = BOOL, - rightType = nonBoolType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 9, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) - ) - ), - doubleOpErrorCases( - name = "data type mismatch - $nonBoolType, $nonBoolType, bool", - op = op, - leftType = nonBoolType, - middleType = nonBoolType, - rightType = BOOL, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(nonBoolType, nonBoolType), nAryOp = op.trim()) - ) - ), - doubleOpErrorCases( - name = "data type mismatch - bool, $nonBoolType, $nonBoolType", - op = op, - leftType = BOOL, - middleType = nonBoolType, - rightType = nonBoolType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()), - createDataTypeMismatchError(col = 9, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) - ) - ), - doubleOpErrorCases( - name = "data type mismatch - $nonBoolType, $nonBoolType, $nonBoolType", - op = op, - leftType = nonBoolType, - middleType = nonBoolType, - rightType = nonBoolType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(nonBoolType, nonBoolType), nAryOp = op.trim()), - createDataTypeMismatchError(col = 9, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) + // double logical op with at least one non-unknown, non-boolean -> data type mismatch + listOf("AND", "OR ").flatMap { op -> + ALL_NON_BOOL_NON_UNKNOWN_TYPES.flatMap { nonBoolType -> + listOf( + doubleOpErrorCases( + name = "data type mismatch - $nonBoolType, bool, bool", + op = op, + leftType = nonBoolType, + middleType = BOOL, + rightType = BOOL, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(nonBoolType, BOOL), nAryOp = op.trim()) + ) + ), + doubleOpErrorCases( + name = "data type mismatch - bool, $nonBoolType, bool", + op = op, + leftType = BOOL, + middleType = nonBoolType, + rightType = BOOL, + expectedProblems = listOf( + 
createDataTypeMismatchError(col = 3, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) + ) + ), + doubleOpErrorCases( + name = "data type mismatch - bool, bool, $nonBoolType", + op = op, + leftType = BOOL, + middleType = BOOL, + rightType = nonBoolType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 9, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) + ) + ), + doubleOpErrorCases( + name = "data type mismatch - $nonBoolType, $nonBoolType, bool", + op = op, + leftType = nonBoolType, + middleType = nonBoolType, + rightType = BOOL, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(nonBoolType, nonBoolType), nAryOp = op.trim()) + ) + ), + doubleOpErrorCases( + name = "data type mismatch - bool, $nonBoolType, $nonBoolType", + op = op, + leftType = BOOL, + middleType = nonBoolType, + rightType = nonBoolType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()), + createDataTypeMismatchError(col = 9, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) + ) + ), + doubleOpErrorCases( + name = "data type mismatch - $nonBoolType, $nonBoolType, $nonBoolType", + op = op, + leftType = nonBoolType, + middleType = nonBoolType, + rightType = nonBoolType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(nonBoolType, nonBoolType), nAryOp = op.trim()), + createDataTypeMismatchError(col = 9, argTypes = listOf(BOOL, nonBoolType), nAryOp = op.trim()) + ) ) ) - ) + } } - } ).flatten() private fun createTrimTestCases( @@ -1876,51 +1879,51 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { expectedType = unionOf(MISSING, STRING, NULL) ) ).flatten() + - // - // data type mismatch cases below this line - // + // + // data type mismatch cases below this line + // - // non-text, non-unknown with non-unknown -> data type mismatch - generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_NON_UNKNOWN_TYPES).map { - createNAryConcatDataTypeMismatchTest( - name = "data type mismatch - ${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "||") + // non-text, non-unknown with non-unknown -> data type mismatch + generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_NON_UNKNOWN_TYPES).map { + createNAryConcatDataTypeMismatchTest( + name = "data type mismatch - ${it.first}, ${it.second}", + leftType = it.first, + rightType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "||") + ) ) - ) - } + - // non-text, non-unknown with an unknown -> data type mismatch and null or missing error - generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - createNAryConcatDataTypeMismatchTest( - name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "||"), - createReturnsNullOrMissingError(col = 3, nAryOp = "||") + } + + // non-text, non-unknown with an unknown -> data type mismatch and null or missing error + generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + createNAryConcatDataTypeMismatchTest( + name = "data type mismatch, null or missing error - ${it.first}, 
${it.second}", + leftType = it.first, + rightType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "||"), + createReturnsNullOrMissingError(col = 3, nAryOp = "||") + ) ) - ) - } + - // text with an unknown -> null or missing error - generateAllUniquePairs(ALL_TEXT_TYPES, ALL_UNKNOWN_TYPES).map { - createNAryConcatDataTypeMismatchTest( - name = "null or missing error - ${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "||")) - ) - } + - // unknown with an unknown -> null or missing error - generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - createNAryConcatDataTypeMismatchTest( - name = "null or missing error - ${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "||")) - ) - } + listOf( + } + + // text with an unknown -> null or missing error + generateAllUniquePairs(ALL_TEXT_TYPES, ALL_UNKNOWN_TYPES).map { + createNAryConcatDataTypeMismatchTest( + name = "null or missing error - ${it.first}, ${it.second}", + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "||")) + ) + } + + // unknown with an unknown -> null or missing error + generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + createNAryConcatDataTypeMismatchTest( + name = "null or missing error - ${it.first}, ${it.second}", + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "||")) + ) + } + listOf( createNAryConcatDataTypeMismatchTest( name = "null or missing error - constrained string, null", leftType = StringType(NumberConstraint.Equals(2)), @@ -1940,48 +1943,48 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "||")) ) ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - constrained string, int", - op = "||", - leftType = StringType(NumberConstraint.Equals(2)), - rightType = INT - ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - union(int, string), bool", - op = "||", - leftType = unionOf(INT, STRING), - rightType = BOOL - ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - union(int, string, null), bool", - op = "||", - leftType = unionOf(INT, STRING, NULL), - rightType = BOOL - ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - nullable int, string", - op = "||", - leftType = INT.asNullable(), - rightType = STRING - ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - optional int, string", - op = "||", - leftType = INT.asOptional(), - rightType = STRING - ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - nullable + optional int, string", - op = "||", - leftType = INT.asNullable().asOptional(), - rightType = STRING - ) + - singleNAryOpMismatchWithSwappedCases( - name = "data type mismatch - any, int", - op = "||", - leftType = ANY, - rightType = INT - ) + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - constrained string, int", + op = "||", + leftType = StringType(NumberConstraint.Equals(2)), + rightType = INT + ) + + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - union(int, string), 
bool", + op = "||", + leftType = unionOf(INT, STRING), + rightType = BOOL + ) + + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - union(int, string, null), bool", + op = "||", + leftType = unionOf(INT, STRING, NULL), + rightType = BOOL + ) + + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - nullable int, string", + op = "||", + leftType = INT.asNullable(), + rightType = STRING + ) + + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - optional int, string", + op = "||", + leftType = INT.asOptional(), + rightType = STRING + ) + + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - nullable + optional int, string", + op = "||", + leftType = INT.asNullable().asOptional(), + rightType = STRING + ) + + singleNAryOpMismatchWithSwappedCases( + name = "data type mismatch - any, int", + op = "||", + leftType = ANY, + rightType = INT + ) private fun createNAryLikeTest( name: String, @@ -1996,16 +1999,20 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "x LIKE y", globals = mapOf( "x" to valueType, - "y" to patternType), - handler = handler) + "y" to patternType + ), + handler = handler + ) else -> TestCase( name = name, originalSql = "x LIKE y ESCAPE z", globals = mapOf( "x" to valueType, "y" to patternType, - "z" to escapeType), - handler = handler) + "z" to escapeType + ), + handler = handler + ) } private fun createNAryLikeValidTest( @@ -2027,7 +2034,6 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ) ) - private fun createNAryLikeDataTypeMismatchTest( name: String, valueType: StaticType, @@ -2138,223 +2144,223 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { outputType = unionOf(BOOL, MISSING, NULL) ) ) + - // - // data type mismatch cases below this line - // - - // 2 args (value and pattern args only) - non-text, non-unknown with non-unknown -> data type mismatch - generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_NON_UNKNOWN_TYPES).map { - createNAryLikeDataTypeMismatchTest( - name = "data type mismatch - ${it.first}, ${it.second}", - valueType = it.first, - patternType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "LIKE") - ) - ) - } + - // non-text, non-unknown with unknown -> data type mismatch and null or missing error - generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - createNAryLikeDataTypeMismatchTest( - name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", - valueType = it.first, - patternType = it.second, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "LIKE"), - createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") - ) - ) - } + - // text with an unknown -> null or missing error - generateAllUniquePairs(ALL_TEXT_TYPES, ALL_UNKNOWN_TYPES).map { - createNAryLikeDataTypeMismatchTest( - name = "null or missing error - ${it.first}, ${it.second}", - valueType = it.first, - patternType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ) - } + - // unknown with an unknown -> null or missing error - generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { - createNAryLikeDataTypeMismatchTest( - name = "null or missing error - ${it.first}, ${it.second}", - valueType = it.first, - patternType = it.second, - expectedProblems = 
listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ) - } + - // 3 args - 1 invalid argument (non-text, non-unknown) -> data type mismatch - generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).flatMap { textTypes -> - val (textType1, textType2) = textTypes - ALL_NON_TEXT_NON_UNKNOWN_TYPES.flatMap { nonTextType -> - listOf( - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - $nonTextType LIKE $textType1 ESCAPE $textType2", - valueType = nonTextType, - patternType = textType1, - escapeType = textType2, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(nonTextType, textType1, textType2), nAryOp = "LIKE") - ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - $textType1 LIKE $nonTextType ESCAPE $textType2", - valueType = textType1, - patternType = nonTextType, - escapeType = textType2, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(textType1, nonTextType, textType2), nAryOp = "LIKE") - ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - $textType1 LIKE $textType2 ESCAPE $nonTextType", - valueType = textType1, - patternType = textType2, - escapeType = nonTextType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(textType1, textType2, nonTextType), nAryOp = "LIKE") - ) + // + // data type mismatch cases below this line + // + + // 2 args (value and pattern args only) - non-text, non-unknown with non-unknown -> data type mismatch + generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_NON_UNKNOWN_TYPES).map { + createNAryLikeDataTypeMismatchTest( + name = "data type mismatch - ${it.first}, ${it.second}", + valueType = it.first, + patternType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(it.first, it.second), nAryOp = "LIKE") ) ) - } - } + - listOf( - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - union(string, int, null) LIKE bool", - valueType = unionOf(STRING, INT, NULL), - patternType = BOOL, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(unionOf(STRING, INT, NULL), BOOL), nAryOp = "LIKE") - ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - 3 args, escape type of union of incompatible types", - valueType = STRING, - patternType = STRING, - escapeType = unionOf(INT, DECIMAL, BOOL), - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(STRING, STRING, unionOf(INT, DECIMAL, BOOL)), nAryOp = "LIKE") - ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - 3 args, escape type of union of incompatible types", - valueType = STRING, - patternType = unionOf(INT, DECIMAL, BOOL), - escapeType = SYMBOL, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(STRING, unionOf(INT, DECIMAL, BOOL), SYMBOL), nAryOp = "LIKE") - ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch - 3 args, escape type of union of incompatible types", - valueType = unionOf(INT, DECIMAL, BOOL), - patternType = STRING, - escapeType = STRING, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(unionOf(INT, DECIMAL, BOOL), STRING, STRING), nAryOp = "LIKE") - ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - string LIKE string 
ESCAPE null", - valueType = STRING, - patternType = STRING, - escapeType = NULL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - string LIKE null ESCAPE string", - valueType = STRING, - patternType = NULL, - escapeType = STRING, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - null LIKE string ESCAPE string", - valueType = NULL, - patternType = STRING, - escapeType = STRING, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - null LIKE null ESCAPE null", - valueType = NULL, - patternType = NULL, - escapeType = NULL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - string LIKE missing ESCAPE string", - valueType = STRING, - patternType = MISSING, - escapeType = STRING, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - missing LIKE string ESCAPE string", - valueType = MISSING, - patternType = STRING, - escapeType = STRING, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - missing LIKE missing ESCAPE missing", - valueType = MISSING, - patternType = MISSING, - escapeType = MISSING, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - missing LIKE null ESCAPE null", - valueType = MISSING, - patternType = NULL, - escapeType = NULL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - null LIKE missing ESCAPE null", - valueType = NULL, - patternType = MISSING, - escapeType = NULL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE with null or missing error - null LIKE null ESCAPE missing", - valueType = NULL, - patternType = NULL, - escapeType = MISSING, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch, null or missing error - 3 args, incompatible escape type with unknown types", - valueType = NULL, - patternType = MISSING, - escapeType = INT, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(NULL, MISSING, INT), nAryOp = "LIKE"), - createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + } + + // non-text, non-unknown with unknown -> data type mismatch and null or missing error + generateAllUniquePairs(ALL_NON_TEXT_NON_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + createNAryLikeDataTypeMismatchTest( + name = "data type mismatch, null or missing error - ${it.first}, ${it.second}", + valueType = it.first, + patternType = it.second, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = 
listOf(it.first, it.second), nAryOp = "LIKE"), + createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + ) ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch, null or missing error - 3 args, incompatible pattern type with unknown types", - valueType = NULL, - patternType = INT, - escapeType = MISSING, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(NULL, INT, MISSING), nAryOp = "LIKE"), - createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + } + + // text with an unknown -> null or missing error + generateAllUniquePairs(ALL_TEXT_TYPES, ALL_UNKNOWN_TYPES).map { + createNAryLikeDataTypeMismatchTest( + name = "null or missing error - ${it.first}, ${it.second}", + valueType = it.first, + patternType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) ) - ), - createNAryLikeDataTypeMismatchTest( - name = "NAry op LIKE data type mismatch, null or missing error - 3 args, incompatible value type with unknown types", - valueType = STRUCT, - patternType = NULL, - escapeType = MISSING, - expectedProblems = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(STRUCT, NULL, MISSING), nAryOp = "LIKE"), - createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + } + + // unknown with an unknown -> null or missing error + generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).map { + createNAryLikeDataTypeMismatchTest( + name = "null or missing error - ${it.first}, ${it.second}", + valueType = it.first, + patternType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) ) - ), - ) + } + + // 3 args - 1 invalid argument (non-text, non-unknown) -> data type mismatch + generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).flatMap { textTypes -> + val (textType1, textType2) = textTypes + ALL_NON_TEXT_NON_UNKNOWN_TYPES.flatMap { nonTextType -> + listOf( + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - $nonTextType LIKE $textType1 ESCAPE $textType2", + valueType = nonTextType, + patternType = textType1, + escapeType = textType2, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(nonTextType, textType1, textType2), nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - $textType1 LIKE $nonTextType ESCAPE $textType2", + valueType = textType1, + patternType = nonTextType, + escapeType = textType2, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(textType1, nonTextType, textType2), nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - $textType1 LIKE $textType2 ESCAPE $nonTextType", + valueType = textType1, + patternType = textType2, + escapeType = nonTextType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(textType1, textType2, nonTextType), nAryOp = "LIKE") + ) + ) + ) + } + } + + listOf( + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - union(string, int, null) LIKE bool", + valueType = unionOf(STRING, INT, NULL), + patternType = BOOL, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(unionOf(STRING, INT, NULL), BOOL), nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - 3 args, escape type of union of incompatible types", + valueType = STRING, + 
patternType = STRING, + escapeType = unionOf(INT, DECIMAL, BOOL), + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(STRING, STRING, unionOf(INT, DECIMAL, BOOL)), nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - 3 args, escape type of union of incompatible types", + valueType = STRING, + patternType = unionOf(INT, DECIMAL, BOOL), + escapeType = SYMBOL, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(STRING, unionOf(INT, DECIMAL, BOOL), SYMBOL), nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch - 3 args, escape type of union of incompatible types", + valueType = unionOf(INT, DECIMAL, BOOL), + patternType = STRING, + escapeType = STRING, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(unionOf(INT, DECIMAL, BOOL), STRING, STRING), nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - string LIKE string ESCAPE null", + valueType = STRING, + patternType = STRING, + escapeType = NULL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - string LIKE null ESCAPE string", + valueType = STRING, + patternType = NULL, + escapeType = STRING, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - null LIKE string ESCAPE string", + valueType = NULL, + patternType = STRING, + escapeType = STRING, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - null LIKE null ESCAPE null", + valueType = NULL, + patternType = NULL, + escapeType = NULL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - string LIKE missing ESCAPE string", + valueType = STRING, + patternType = MISSING, + escapeType = STRING, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - missing LIKE string ESCAPE string", + valueType = MISSING, + patternType = STRING, + escapeType = STRING, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - missing LIKE missing ESCAPE missing", + valueType = MISSING, + patternType = MISSING, + escapeType = MISSING, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - missing LIKE null ESCAPE null", + valueType = MISSING, + patternType = NULL, + escapeType = NULL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - null LIKE missing ESCAPE null", + valueType = NULL, + patternType = MISSING, + escapeType = NULL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + 
createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE with null or missing error - null LIKE null ESCAPE missing", + valueType = NULL, + patternType = NULL, + escapeType = MISSING, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE")) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch, null or missing error - 3 args, incompatible escape type with unknown types", + valueType = NULL, + patternType = MISSING, + escapeType = INT, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(NULL, MISSING, INT), nAryOp = "LIKE"), + createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch, null or missing error - 3 args, incompatible pattern type with unknown types", + valueType = NULL, + patternType = INT, + escapeType = MISSING, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(NULL, INT, MISSING), nAryOp = "LIKE"), + createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + ) + ), + createNAryLikeDataTypeMismatchTest( + name = "NAry op LIKE data type mismatch, null or missing error - 3 args, incompatible value type with unknown types", + valueType = STRUCT, + patternType = NULL, + escapeType = MISSING, + expectedProblems = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(STRUCT, NULL, MISSING), nAryOp = "LIKE"), + createReturnsNullOrMissingError(col = 3, nAryOp = "LIKE") + ) + ), + ) /** * Creates a test expecting [outputType] and [expectedWarnings] with the query: @@ -2384,7 +2390,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ) return when (createSwapped && fromType != toType) { true -> - listOf(originalTest, + listOf( + originalTest, TestCase( name = "x BETWEEN z AND y : $name", originalSql = "x BETWEEN z AND y", @@ -2446,7 +2453,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { handler = expectSemanticErrors(listOf(createDataTypeMismatchError(col = 3, argTypes = listOf(valueType, fromType, toType), nAryOp = "BETWEEN"))) ) return when (fromType != toType) { - true -> listOf(originalTest, + true -> listOf( + originalTest, TestCase( name = "x BETWEEN z AND y : $name", originalSql = "x BETWEEN z AND y", @@ -2512,179 +2520,179 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ) } } + - // tests with two comparable types - generateAllUniquePairs(comparableTypes, comparableTypes).flatMap { comparable -> - // BETWEEN AND -> data type mismatch - // , come from [comparableTypes] and are comparable with each other. - // comes from [incomparableTypes] and is incomparable with . 
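For orientation, the BETWEEN error helpers above appear to reduce to ordinary TestCases over globals named x, y and z; the helper bodies sit outside this hunk, so the following is only an assumed sketch of the base case (the swapped "x BETWEEN z AND y" variant shown above is the one added when fromType != toType):

    // Assumed shape of the base case built by createNAryBetweenErrorTest; the query text,
    // globals names and handler are inferred from the swapped variant and from
    // createNAryBetweenDataTypeMismatchTest above, not taken from this hunk.
    TestCase(
        name = name,
        originalSql = "x BETWEEN y AND z",
        globals = mapOf("x" to valueType, "y" to fromType, "z" to toType),
        handler = expectSemanticErrors(expectedErrors)
    )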
- incomparableTypes.flatMap { incomparable -> - createNAryBetweenErrorTest( - name = "data type mismatch - x: ${comparable.first}, y: $incomparable, z: ${comparable.second}", - valueType = comparable.first, - fromType = incomparable, - toType = comparable.second, - expectedErrors = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(comparable.first, incomparable, comparable.second), nAryOp = "BETWEEN") - ) - ) - } + - ALL_UNKNOWN_TYPES.flatMap { unknownType -> - // BETWEEN AND -> null or missing error - // and come from `comparableTypes` and are comparable with each - // other - createNAryBetweenErrorTest( - name = "null or missing error - x: $unknownType, y: ${comparable.first}, z: ${comparable.second}", - valueType = unknownType, - fromType = comparable.first, - toType = comparable.second, - expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) - ) + - // BETWEEN AND -> null or missing error - // and come from `comparableTypes` and are comparable with each - // other - createNAryBetweenErrorTest( - name = "null or missing error - x: ${comparable.first}, y: $unknownType, z: ${comparable.second}", - valueType = comparable.first, - fromType = unknownType, - toType = comparable.second, - expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) - ) - } - } + - comparableTypes.flatMap { comparable -> - incomparableTypes.flatMap { incomparable -> - ALL_UNKNOWN_TYPES.flatMap { unknownType -> - // BETWEEN AND unknown -> data type mismatch and null or missing - // error - // comes from [comparableTypes] and comes from [incomparableTypes]. - // Comparing with results in a null or missing error + // tests with two comparable types + generateAllUniquePairs(comparableTypes, comparableTypes).flatMap { comparable -> + // BETWEEN AND -> data type mismatch + // , come from [comparableTypes] and are comparable with each other. + // comes from [incomparableTypes] and is incomparable with . 
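As a concrete illustration of the pairwise generation that follows, take comparable = Pair(INT, DECIMAL) and incomparable = STRING (assuming, as in the parametersForNAryBetweenTests call sites further down, that INT and DECIMAL are in the comparable list and STRING in the incomparable list); the inner flatMap then produces a case equivalent to:

    // Hypothetical expansion for comparable = Pair(INT, DECIMAL), incomparable = STRING:
    // "x BETWEEN y AND z" with x: INT, y: STRING, z: DECIMAL must flag a data type mismatch at column 3.
    createNAryBetweenErrorTest(
        name = "data type mismatch - x: INT, y: STRING, z: DECIMAL",
        valueType = INT,
        fromType = STRING,
        toType = DECIMAL,
        expectedErrors = listOf(
            createDataTypeMismatchError(col = 3, argTypes = listOf(INT, STRING, DECIMAL), nAryOp = "BETWEEN")
        )
    )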
+ incomparableTypes.flatMap { incomparable -> createNAryBetweenErrorTest( - name = "data type mismatch, null or missing error - x: ${comparable}, y: $incomparable, z: $unknownType", - valueType = comparable, + name = "data type mismatch - x: ${comparable.first}, y: $incomparable, z: ${comparable.second}", + valueType = comparable.first, fromType = incomparable, - toType = unknownType, + toType = comparable.second, expectedErrors = listOf( - createDataTypeMismatchError(col = 3, argTypes = listOf(comparable, incomparable, unknownType), nAryOp = "BETWEEN"), - createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN") + createDataTypeMismatchError(col = 3, argTypes = listOf(comparable.first, incomparable, comparable.second), nAryOp = "BETWEEN") ) ) + } + + ALL_UNKNOWN_TYPES.flatMap { unknownType -> + // BETWEEN AND -> null or missing error + // and come from `comparableTypes` and are comparable with each + // other + createNAryBetweenErrorTest( + name = "null or missing error - x: $unknownType, y: ${comparable.first}, z: ${comparable.second}", + valueType = unknownType, + fromType = comparable.first, + toType = comparable.second, + expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) + ) + + // BETWEEN AND -> null or missing error + // and come from `comparableTypes` and are comparable with each + // other + createNAryBetweenErrorTest( + name = "null or missing error - x: ${comparable.first}, y: $unknownType, z: ${comparable.second}", + valueType = comparable.first, + fromType = unknownType, + toType = comparable.second, + expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) + ) + } + } + + comparableTypes.flatMap { comparable -> + incomparableTypes.flatMap { incomparable -> + ALL_UNKNOWN_TYPES.flatMap { unknownType -> + // BETWEEN AND unknown -> data type mismatch and null or missing + // error + // comes from [comparableTypes] and comes from [incomparableTypes]. 
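The cases generated just below additionally mix in an unknown type; for instance, hypothetically taking comparable = INT, incomparable = STRING and unknownType = NULL, the expansion would be roughly:

    // Hypothetical expansion for comparable = INT, incomparable = STRING, unknownType = NULL:
    // both a data type mismatch and a null-or-missing problem are expected for the one expression.
    createNAryBetweenErrorTest(
        name = "data type mismatch, null or missing error - x: INT, y: STRING, z: NULL",
        valueType = INT,
        fromType = STRING,
        toType = NULL,
        expectedErrors = listOf(
            createDataTypeMismatchError(col = 3, argTypes = listOf(INT, STRING, NULL), nAryOp = "BETWEEN"),
            createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")
        )
    )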
+ // Comparing with results in a null or missing error + createNAryBetweenErrorTest( + name = "data type mismatch, null or missing error - x: $comparable, y: $incomparable, z: $unknownType", + valueType = comparable, + fromType = incomparable, + toType = unknownType, + expectedErrors = listOf( + createDataTypeMismatchError(col = 3, argTypes = listOf(comparable, incomparable, unknownType), nAryOp = "BETWEEN"), + createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN") + ) + ) + } } } - } @JvmStatic @Suppress("unused") fun parametersForNAryBetweenTests() = createNAryBetweenComparableTypeTests(ALL_NUMERIC_TYPES) + - createNAryBetweenComparableTypeTests(ALL_TEXT_TYPES) + - createNAryBetweenComparableTypeTests(ALL_LOB_TYPES) + - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { otherType -> - createNAryBetweenComparableTypeTests(listOf(otherType)) - } + - createNAryBetweenValidTest( - name = "matching union types; x: union(int, string), y: int, z: decimal", - valueType = unionOf(INT, STRING), - fromType = INT, - toType = DECIMAL, - outputType = unionOf(BOOL, MISSING) - ) + - createNAryBetweenValidTest( - name = "matching union types containing null; x: union(int, string, null), y: int, z: decimal", - valueType = unionOf(INT, STRING, NULL), - fromType = INT, - toType = DECIMAL, - outputType = unionOf(BOOL, MISSING, NULL) - ) + - createNAryBetweenValidTest( - name = "x: ANY, y: INT, z: DECIMAL", - valueType = ANY, - fromType = INT, - toType = DECIMAL, - outputType = unionOf(BOOL, MISSING, NULL) - ) + - // - // data type mismatch cases for arithmetic ops below - // + createNAryBetweenComparableTypeTests(ALL_TEXT_TYPES) + + createNAryBetweenComparableTypeTests(ALL_LOB_TYPES) + + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { otherType -> + createNAryBetweenComparableTypeTests(listOf(otherType)) + } + + createNAryBetweenValidTest( + name = "matching union types; x: union(int, string), y: int, z: decimal", + valueType = unionOf(INT, STRING), + fromType = INT, + toType = DECIMAL, + outputType = unionOf(BOOL, MISSING) + ) + + createNAryBetweenValidTest( + name = "matching union types containing null; x: union(int, string, null), y: int, z: decimal", + valueType = unionOf(INT, STRING, NULL), + fromType = INT, + toType = DECIMAL, + outputType = unionOf(BOOL, MISSING, NULL) + ) + + createNAryBetweenValidTest( + name = "x: ANY, y: INT, z: DECIMAL", + valueType = ANY, + fromType = INT, + toType = DECIMAL, + outputType = unionOf(BOOL, MISSING, NULL) + ) + + // + // data type mismatch cases for arithmetic ops below + // - // numeric with non-numerics - createMultipleNAryBetweenErrorTests(comparableTypes = ALL_NUMERIC_TYPES, incomparableTypes = ALL_NON_NUMERIC_NON_UNKNOWN_TYPES) + - // text with non-text - createMultipleNAryBetweenErrorTests(comparableTypes = ALL_TEXT_TYPES, incomparableTypes = ALL_NON_TEXT_NON_UNKNOWN_TYPES) + - // lob with non-lobs - createMultipleNAryBetweenErrorTests(comparableTypes = ALL_LOB_TYPES, incomparableTypes = ALL_NON_LOB_NON_UNKNOWN_TYPES) + - // types only comparable to self with different types - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { otherType -> - createMultipleNAryBetweenErrorTests(comparableTypes = listOf(otherType), incomparableTypes = ALL_NON_UNKNOWN_TYPES.filter { it != otherType }) - } + - // unknowns with non-unknown types - generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).flatMap { unknownTypes -> - ALL_NON_UNKNOWN_TYPES.flatMap { nonUnknownType -> - createNAryBetweenErrorTest( - name = "null or missing error - x: $nonUnknownType, y: ${unknownTypes.first}, 
z: ${unknownTypes.second}", - valueType = nonUnknownType, - fromType = unknownTypes.first, - toType = unknownTypes.second, - expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) - ) + - createNAryBetweenErrorTest( - name = "null or missing error - x: ${unknownTypes.first}, y: $nonUnknownType, z: ${unknownTypes.second}", - valueType = unknownTypes.first, - fromType = nonUnknownType, - toType = unknownTypes.second, - expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) - ) + - createNAryBetweenErrorTest( - name = "null or missing error - x: ${unknownTypes.first}, y: ${unknownTypes.second}, z: $nonUnknownType", - valueType = unknownTypes.first, - fromType = unknownTypes.second, - toType = nonUnknownType, - expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) - ) - } - } + - createNAryBetweenDataTypeMismatchTest( - name = "incomparable nullable valueType; x: nullable int, y: nullable string, z: nullable symbol", - valueType = INT.asNullable(), - fromType = STRING.asNullable(), - toType = SYMBOL.asNullable() - ) + - createNAryBetweenDataTypeMismatchTest( - name = "incomparable nullable from/toType; x: nullable string, y: nullable int, z: nullable symbol", - valueType = STRING.asNullable(), - fromType = INT.asNullable(), - toType = SYMBOL.asNullable() - ) + - createNAryBetweenDataTypeMismatchTest( - name = "incomparable optional valueType; x: optional int, y: optional string, z: optional symbol", - valueType = INT.asOptional(), - fromType = STRING.asOptional(), - toType = SYMBOL.asOptional() - ) + - createNAryBetweenDataTypeMismatchTest( - name = "incomparable optional from/toType; x: optional string, y: optional int, z: optional symbol", - valueType = STRING.asOptional(), - fromType = INT.asOptional(), - toType = SYMBOL.asOptional() - ) + - createNAryBetweenDataTypeMismatchTest( - name = "union comparable to one union, not to other union; x: union(int, decimal), y: union(int, null), z: union(string, symbol)", - valueType = unionOf(INT, DECIMAL), - fromType = unionOf(INT, NULL), - toType = unionOf(STRING, SYMBOL) - ) + - createNAryBetweenDataTypeMismatchTest( - name = "union incomparable to other unions; x: union(bool, string), y: union(int, null), z: union(string, symbol)", - valueType = unionOf(BOOL, STRING), - fromType = unionOf(INT, FLOAT), - toType = unionOf(INT, DECIMAL) - ) + - // valueType is comparable to fromType and toType. 
but fromType is incomparable to toType - createNAryBetweenDataTypeMismatchTest( - name = "fromType incomparable to toType; x: union(int, string), y: int, z: string", - valueType = unionOf(INT, STRING), - fromType = INT, - toType = STRING - ) + // numeric with non-numerics + createMultipleNAryBetweenErrorTests(comparableTypes = ALL_NUMERIC_TYPES, incomparableTypes = ALL_NON_NUMERIC_NON_UNKNOWN_TYPES) + + // text with non-text + createMultipleNAryBetweenErrorTests(comparableTypes = ALL_TEXT_TYPES, incomparableTypes = ALL_NON_TEXT_NON_UNKNOWN_TYPES) + + // lob with non-lobs + createMultipleNAryBetweenErrorTests(comparableTypes = ALL_LOB_TYPES, incomparableTypes = ALL_NON_LOB_NON_UNKNOWN_TYPES) + + // types only comparable to self with different types + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { otherType -> + createMultipleNAryBetweenErrorTests(comparableTypes = listOf(otherType), incomparableTypes = ALL_NON_UNKNOWN_TYPES.filter { it != otherType }) + } + + // unknowns with non-unknown types + generateAllUniquePairs(ALL_UNKNOWN_TYPES, ALL_UNKNOWN_TYPES).flatMap { unknownTypes -> + ALL_NON_UNKNOWN_TYPES.flatMap { nonUnknownType -> + createNAryBetweenErrorTest( + name = "null or missing error - x: $nonUnknownType, y: ${unknownTypes.first}, z: ${unknownTypes.second}", + valueType = nonUnknownType, + fromType = unknownTypes.first, + toType = unknownTypes.second, + expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) + ) + + createNAryBetweenErrorTest( + name = "null or missing error - x: ${unknownTypes.first}, y: $nonUnknownType, z: ${unknownTypes.second}", + valueType = unknownTypes.first, + fromType = nonUnknownType, + toType = unknownTypes.second, + expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) + ) + + createNAryBetweenErrorTest( + name = "null or missing error - x: ${unknownTypes.first}, y: ${unknownTypes.second}, z: $nonUnknownType", + valueType = unknownTypes.first, + fromType = unknownTypes.second, + toType = nonUnknownType, + expectedErrors = listOf(createReturnsNullOrMissingError(col = 3, nAryOp = "BETWEEN")) + ) + } + } + + createNAryBetweenDataTypeMismatchTest( + name = "incomparable nullable valueType; x: nullable int, y: nullable string, z: nullable symbol", + valueType = INT.asNullable(), + fromType = STRING.asNullable(), + toType = SYMBOL.asNullable() + ) + + createNAryBetweenDataTypeMismatchTest( + name = "incomparable nullable from/toType; x: nullable string, y: nullable int, z: nullable symbol", + valueType = STRING.asNullable(), + fromType = INT.asNullable(), + toType = SYMBOL.asNullable() + ) + + createNAryBetweenDataTypeMismatchTest( + name = "incomparable optional valueType; x: optional int, y: optional string, z: optional symbol", + valueType = INT.asOptional(), + fromType = STRING.asOptional(), + toType = SYMBOL.asOptional() + ) + + createNAryBetweenDataTypeMismatchTest( + name = "incomparable optional from/toType; x: optional string, y: optional int, z: optional symbol", + valueType = STRING.asOptional(), + fromType = INT.asOptional(), + toType = SYMBOL.asOptional() + ) + + createNAryBetweenDataTypeMismatchTest( + name = "union comparable to one union, not to other union; x: union(int, decimal), y: union(int, null), z: union(string, symbol)", + valueType = unionOf(INT, DECIMAL), + fromType = unionOf(INT, NULL), + toType = unionOf(STRING, SYMBOL) + ) + + createNAryBetweenDataTypeMismatchTest( + name = "union incomparable to other unions; x: union(bool, string), y: union(int, null), z: union(string, 
symbol)", + valueType = unionOf(BOOL, STRING), + fromType = unionOf(INT, FLOAT), + toType = unionOf(INT, DECIMAL) + ) + + // valueType is comparable to fromType and toType. but fromType is incomparable to toType + createNAryBetweenDataTypeMismatchTest( + name = "fromType incomparable to toType; x: union(int, string), y: int, z: string", + valueType = unionOf(INT, STRING), + fromType = INT, + toType = STRING + ) @JvmStatic @Suppress("unused") @@ -2766,7 +2774,9 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "a" to unionOf( ListType(elementType = StaticType.INT), ListType(elementType = StaticType.BOOL), - ListType(elementType = StaticType.STRING))), + ListType(elementType = StaticType.STRING) + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.BOOL, StaticType.STRING)) ), TestCase( @@ -2778,9 +2788,12 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { TestCase( name = "path on union of list and ANY", originalSql = "a[1]", - globals = mapOf("a" to unionOf( - ListType(elementType = StaticType.INT), - StaticType.ANY)), + globals = mapOf( + "a" to unionOf( + ListType(elementType = StaticType.INT), + StaticType.ANY + ) + ), handler = expectQueryOutputType(StaticType.ANY) ), TestCase( @@ -2788,7 +2801,9 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.id", globals = mapOf( "a" to unionOf( - StructType(mapOf("id" to StaticType.INT)))), + StructType(mapOf("id" to StaticType.INT)) + ) + ), handler = expectQueryOutputType(StaticType.INT) ), TestCase( @@ -2797,7 +2812,9 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to unionOf( StaticType.ANY, - StructType(mapOf("id" to StaticType.INT)))), + StructType(mapOf("id" to StaticType.INT)) + ) + ), handler = expectQueryOutputType(StaticType.ANY) ), TestCase( @@ -2806,7 +2823,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to StructType( mapOf("id" to StaticType.INT) - ).asNullable()), + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2815,7 +2833,9 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to unionOf( StructType(mapOf("id" to StaticType.INT)), - StructType(mapOf("id" to StaticType.STRING)))), + StructType(mapOf("id" to StaticType.STRING)) + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.STRING)) ), TestCase( @@ -2824,7 +2844,9 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to unionOf( StructType(mapOf("id" to StaticType.INT)), - StaticType.INT)), + StaticType.INT + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2834,7 +2856,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "a" to unionOf( StructType(mapOf("id" to StaticType.INT)), StaticType.INT - ).asNullable()), + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2844,7 +2867,9 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "a" to unionOf( StructType(mapOf("id" to StaticType.INT)), StaticType.INT, - StaticType.TIMESTAMP)), + StaticType.TIMESTAMP + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2852,7 +2877,9 @@ class 
StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.id", globals = mapOf( "a" to StructType( - mapOf("id" to unionOf(StaticType.INT, StaticType.STRING)))), + mapOf("id" to unionOf(StaticType.INT, StaticType.STRING)) + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.STRING)) ), TestCase( @@ -2860,9 +2887,13 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("id" to StaticType.INT) - ).asNullable()))), + mapOf( + "b" to StructType( + mapOf("id" to StaticType.INT) + ).asNullable() + ) + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2871,7 +2902,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to StructType( mapOf("id" to StaticType.INT) - ).asOptional()), + ).asOptional() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2880,7 +2912,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to StructType( mapOf("id" to StaticType.INT) - ).asNullable().asOptional()), + ).asNullable().asOptional() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2888,10 +2921,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT))))) - ).asNullable()), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ) + ) + ) + ) + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2899,10 +2939,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT) - ).asNullable()))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ).asNullable() + ) + ) + ) + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2910,12 +2957,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT) - ).asNullable()) - ).asNullable()) - ).asNullable()), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ).asNullable() + ) + ).asNullable() + ) + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -2923,9 +2975,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.ANY))))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.ANY) + ) + ) + ) + ) + ) + ), handler = expectQueryOutputType(StaticType.ANY) ), TestCase( @@ -2933,9 +2993,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - 
mapOf("c" to StructType( - mapOf("id" to StaticType.INT.asNullable()))))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT.asNullable()) + ) + ) + ) + ) + ) + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.NULL)) ), TestCase( @@ -2943,9 +3011,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c.id", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StructType(emptyMap())))))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StructType(emptyMap())) + ) + ) + ) + ) + ) + ), handler = expectQueryOutputType(StructType(emptyMap())) ), TestCase( @@ -2953,9 +3029,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT))))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ) + ) + ) + ) + ) + ), handler = expectQueryOutputType(StructType(mapOf("id" to StaticType.INT))) ), TestCase( @@ -2963,10 +3047,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT) - ).asNullable()))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ).asNullable() + ) + ) + ) + ) + ), handler = expectQueryOutputType(unionOf(StructType(mapOf("id" to StaticType.INT)), StaticType.NULL)) ), TestCase( @@ -2974,10 +3065,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT) - ).asOptional()))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ).asOptional() + ) + ) + ) + ) + ), handler = expectQueryOutputType(unionOf(StructType(mapOf("id" to StaticType.INT)), StaticType.MISSING)) ), TestCase( @@ -2985,10 +3083,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT) - ).asNullable().asOptional()))))), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ).asNullable().asOptional() + ) + ) + ) + ) + ), handler = expectQueryOutputType(unionOf(StructType(mapOf("id" to StaticType.INT)), StaticType.NULL, StaticType.MISSING)) ), TestCase( @@ -2996,10 +3101,17 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "a.b.c", globals = mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf("c" to StructType( - mapOf("id" to StaticType.INT))))) - ).asNullable()), + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf("id" to StaticType.INT) + ) + ) + ) + ) + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StructType(mapOf("id" to StaticType.INT)), StaticType.MISSING)) ), TestCase( @@ -3008,7 +3120,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to StructType( mapOf("b" to ListType(elementType = StaticType.INT)) - ).asNullable()), + ).asNullable() + ), handler = 
expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -3017,7 +3130,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to StructType( mapOf("b" to ListType(elementType = StaticType.INT).asNullable()) - ).asNullable()), + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -3026,7 +3140,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf( "a" to ListType( elementType = StructType(mapOf("id" to StaticType.INT)) - ).asNullable()), + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ), TestCase( @@ -3037,7 +3152,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { elementType = StructType( mapOf("id" to StaticType.INT) ).asNullable() - ).asNullable()), + ).asNullable() + ), handler = expectQueryOutputType(unionOf(StaticType.INT, StaticType.MISSING)) ) ) @@ -3333,15 +3449,18 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { createDataTypeMismatchError( SourceLocationMeta(3L, 30L, 6L), argTypes = listOf(unionOf(ALL_NUMERIC_TYPES.toSet()), unionOf(ALL_TEXT_TYPES.toSet())), - nAryOp = "CASE"), + nAryOp = "CASE" + ), createDataTypeMismatchError( SourceLocationMeta(4L, 30L, 5L), argTypes = listOf(unionOf(ALL_NUMERIC_TYPES.toSet()), unionOf(ALL_LOB_TYPES.toSet())), - nAryOp = "CASE"), + nAryOp = "CASE" + ), createDataTypeMismatchError( SourceLocationMeta(5L, 30L, 7L), argTypes = listOf(unionOf(ALL_NUMERIC_TYPES.toSet()), unionOf(ALL_TYPES_ONLY_COMPARABLE_TO_SELF.toSet())), - nAryOp = "CASE") + nAryOp = "CASE" + ) ) ) ) @@ -3392,194 +3511,194 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { handler = expectQueryOutputType(unionOf(BOOL, STRING, NULL)) ), TestCase( - "CASE WHEN with ELSE expression, WHEN expr INT compared to nullable INT, THEN of known types", - """ + "CASE WHEN with ELSE expression, WHEN expr INT compared to nullable INT, THEN of known types", + """ CASE WHEN t_int = t_nullable_int THEN t_int ELSE t_string END """, - mapOf( - "t_int" to INT, - "t_nullable_int" to INT.asNullable(), - "t_string" to STRING - ), - handler = expectQueryOutputType(unionOf(INT, STRING)) + mapOf( + "t_int" to INT, + "t_nullable_int" to INT.asNullable(), + "t_string" to STRING + ), + handler = expectQueryOutputType(unionOf(INT, STRING)) ), TestCase( - "CASE WHEN without ELSE expression, WHEN expr INT compared to nullable INT, THEN of known type", - """ + "CASE WHEN without ELSE expression, WHEN expr INT compared to nullable INT, THEN of known type", + """ CASE WHEN t_int = t_nullable_int THEN t_int END """, - mapOf( - "t_int" to INT, - "t_nullable_int" to INT.asNullable() - ), - handler = expectQueryOutputType(unionOf(INT, NULL)) + mapOf( + "t_int" to INT, + "t_nullable_int" to INT.asNullable() + ), + handler = expectQueryOutputType(unionOf(INT, NULL)) ), TestCase( - "CASE WHEN with ELSE expression, WHEN expr INT compared to optional INT, THEN of known types", - """ + "CASE WHEN with ELSE expression, WHEN expr INT compared to optional INT, THEN of known types", + """ CASE WHEN t_int = t_optional_int THEN t_int ELSE t_string END """, - mapOf( - "t_int" to INT, - "t_optional_int" to INT.asOptional(), - "t_string" to STRING - ), - handler = expectQueryOutputType(unionOf(INT, STRING)) + mapOf( + "t_int" to INT, + "t_optional_int" to INT.asOptional(), + "t_string" to STRING + ), + handler = 
expectQueryOutputType(unionOf(INT, STRING)) ), TestCase( - "CASE WHEN without ELSE expression, WHEN expr INT compared to optional INT, THEN of known types", - """ + "CASE WHEN without ELSE expression, WHEN expr INT compared to optional INT, THEN of known types", + """ CASE WHEN t_int = t_optional_int THEN t_int END """, - mapOf( - "t_int" to INT, - "t_optional_int" to INT.asOptional() - ), - handler = expectQueryOutputType(unionOf(INT, NULL)) + mapOf( + "t_int" to INT, + "t_optional_int" to INT.asOptional() + ), + handler = expectQueryOutputType(unionOf(INT, NULL)) ), TestCase( - "CASE WHEN with ELSE expression, WHEN expr union with bool, THEN of known types", - """ + "CASE WHEN with ELSE expression, WHEN expr union with bool, THEN of known types", + """ CASE WHEN u_bool_and_other_types THEN t_int ELSE t_string END """, - mapOf( - "u_bool_and_other_types" to unionOf(BOOL, INT, NULL), - "t_int" to INT, - "t_string" to STRING - ), - handler = expectQueryOutputType(unionOf(INT, STRING)) + mapOf( + "u_bool_and_other_types" to unionOf(BOOL, INT, NULL), + "t_int" to INT, + "t_string" to STRING + ), + handler = expectQueryOutputType(unionOf(INT, STRING)) ), TestCase( - "CASE WHEN without ELSE expression, WHEN expr union with bool, THEN of known type", - """ + "CASE WHEN without ELSE expression, WHEN expr union with bool, THEN of known type", + """ CASE WHEN u_bool_and_other_types THEN t_int END """, - mapOf( - "u_bool_and_other_types" to unionOf(BOOL, INT, NULL), - "t_int" to INT - ), - handler = expectQueryOutputType(unionOf(INT, NULL)) + mapOf( + "u_bool_and_other_types" to unionOf(BOOL, INT, NULL), + "t_int" to INT + ), + handler = expectQueryOutputType(unionOf(INT, NULL)) ), TestCase( - "CASE WHEN with ELSE expression, WHEN expr ANY, THEN of known type", - """ + "CASE WHEN with ELSE expression, WHEN expr ANY, THEN of known type", + """ CASE WHEN t_any THEN t_int ELSE t_string END """, - mapOf( - "t_any" to ANY, - "t_int" to INT, - "t_string" to STRING - ), - handler = expectQueryOutputType(unionOf(INT, STRING)) + mapOf( + "t_any" to ANY, + "t_int" to INT, + "t_string" to STRING + ), + handler = expectQueryOutputType(unionOf(INT, STRING)) ), TestCase( - "CASE WHEN without ELSE expression, WHEN expr ANY, THEN of known type", - """ + "CASE WHEN without ELSE expression, WHEN expr ANY, THEN of known type", + """ CASE WHEN t_any THEN t_int END """, - mapOf( - "t_any" to ANY, - "t_int" to INT - ), - handler = expectQueryOutputType(unionOf(INT, NULL)) + mapOf( + "t_any" to ANY, + "t_int" to INT + ), + handler = expectQueryOutputType(unionOf(INT, NULL)) ) ) + - // - // SearchedCaseWhen error cases below - // + // + // SearchedCaseWhen error cases below + // - // tests with non-bool, non-unknown whenExpr - ALL_NON_BOOL_NON_UNKNOWN_TYPES.flatMap { nonBool -> - listOf( - TestCase( - name = "data type mismatch error - $nonBool whenExpr", - originalSql = """ + // tests with non-bool, non-unknown whenExpr + ALL_NON_BOOL_NON_UNKNOWN_TYPES.flatMap { nonBool -> + listOf( + TestCase( + name = "data type mismatch error - $nonBool whenExpr", + originalSql = """ CASE WHEN t_non_bool THEN t_non_bool END """, - globals = mapOf("t_non_bool" to nonBool), - handler = expectSemanticProblems( - listOf( - createIncompatibleTypesForExprError(SourceLocationMeta(3L, 38L, 10L), expectedType = BOOL, actualType = nonBool) + globals = mapOf("t_non_bool" to nonBool), + handler = expectSemanticProblems( + listOf( + createIncompatibleTypesForExprError(SourceLocationMeta(3L, 38L, 10L), expectedType = BOOL, actualType = nonBool) + ) 
) - ) - ), - TestCase( - name = "data type mismatch error - $nonBool whenExpr and elseExpr", - originalSql = """ + ), + TestCase( + name = "data type mismatch error - $nonBool whenExpr and elseExpr", + originalSql = """ CASE WHEN t_non_bool THEN t_non_bool ELSE t_non_bool END """, - globals = mapOf("t_non_bool" to nonBool), - handler = expectSemanticProblems( - listOf( - createIncompatibleTypesForExprError(SourceLocationMeta(3L, 38L, 10L), expectedType = BOOL, actualType = nonBool) + globals = mapOf("t_non_bool" to nonBool), + handler = expectSemanticProblems( + listOf( + createIncompatibleTypesForExprError(SourceLocationMeta(3L, 38L, 10L), expectedType = BOOL, actualType = nonBool) + ) ) ) ) - ) - } + - // tests with unknown whenExpr - ALL_UNKNOWN_TYPES.flatMap { unknownType -> - listOf( - TestCase( - name = "null or missing error - $unknownType whenExpr", - originalSql = """ + } + + // tests with unknown whenExpr + ALL_UNKNOWN_TYPES.flatMap { unknownType -> + listOf( + TestCase( + name = "null or missing error - $unknownType whenExpr", + originalSql = """ CASE WHEN t_unknown THEN t_unknown END """, - globals = mapOf("t_unknown" to unknownType), - handler = expectSemanticProblems( - listOf( - createReturnsNullOrMissingError(SourceLocationMeta(3L, 38L, 9L)) + globals = mapOf("t_unknown" to unknownType), + handler = expectSemanticProblems( + listOf( + createReturnsNullOrMissingError(SourceLocationMeta(3L, 38L, 9L)) + ) ) - ) - ), - TestCase( - name = "null or missing error - $unknownType whenExpr and elseExpr", - originalSql = """ + ), + TestCase( + name = "null or missing error - $unknownType whenExpr and elseExpr", + originalSql = """ CASE WHEN t_unknown THEN t_unknown ELSE t_unknown END """, - globals = mapOf("t_unknown" to unknownType), - handler = expectSemanticProblems( - listOf( - createReturnsNullOrMissingError(SourceLocationMeta(3L, 38L, 9L)) + globals = mapOf("t_unknown" to unknownType), + handler = expectSemanticProblems( + listOf( + createReturnsNullOrMissingError(SourceLocationMeta(3L, 38L, 9L)) + ) ) ) ) - ) - } + - listOf( - TestCase( - name = "multiple errors - non-bool whenExprs and unknown whenExprs", - originalSql = """ + } + + listOf( + TestCase( + name = "multiple errors - non-bool whenExprs and unknown whenExprs", + originalSql = """ CASE WHEN t_int THEN t_int WHEN t_string THEN t_string @@ -3588,25 +3707,25 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { WHEN t_missing THEN t_missing END """, - globals = mapOf( - "t_int" to INT, - "t_string" to STRING, - "t_any" to ANY, - "t_null" to NULL, - "t_missing" to MISSING - ), - handler = expectSemanticProblems( - listOf( - createIncompatibleTypesForExprError(SourceLocationMeta(3L, 34L, 5L), expectedType = BOOL, actualType = INT), - createIncompatibleTypesForExprError(SourceLocationMeta(4L, 34L, 8L), expectedType = BOOL, actualType = STRING), - createReturnsNullOrMissingError(SourceLocationMeta(6L, 34L, 6L)), - createReturnsNullOrMissingError(SourceLocationMeta(7L, 34L, 9L)) + globals = mapOf( + "t_int" to INT, + "t_string" to STRING, + "t_any" to ANY, + "t_null" to NULL, + "t_missing" to MISSING + ), + handler = expectSemanticProblems( + listOf( + createIncompatibleTypesForExprError(SourceLocationMeta(3L, 34L, 5L), expectedType = BOOL, actualType = INT), + createIncompatibleTypesForExprError(SourceLocationMeta(4L, 34L, 8L), expectedType = BOOL, actualType = STRING), + createReturnsNullOrMissingError(SourceLocationMeta(6L, 34L, 6L)), + createReturnsNullOrMissingError(SourceLocationMeta(7L, 34L, 
9L)) + ) ) - ) - ), - TestCase( - name = "multiple errors - whenExprs of unions not containing bool", - originalSql = """ + ), + TestCase( + name = "multiple errors - whenExprs of unions not containing bool", + originalSql = """ CASE WHEN t_numeric THEN t_numeric WHEN t_text THEN t_text @@ -3614,31 +3733,34 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { WHEN t_null_or_missing THEN t_null_or_missing END """, - globals = mapOf( - "t_numeric" to unionOf(ALL_NUMERIC_TYPES.toSet()), - "t_text" to unionOf(ALL_TEXT_TYPES.toSet()), - "t_lob" to unionOf(ALL_LOB_TYPES.toSet()), - "t_null_or_missing" to NULL_OR_MISSING - ), - handler = expectSemanticProblems( - listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(3L, 34L, 9L), - expectedType = BOOL, - actualType = unionOf(ALL_NUMERIC_TYPES.toSet())), - createIncompatibleTypesForExprError( - SourceLocationMeta(4L, 34L, 6L), - expectedType = BOOL, - actualType = unionOf(ALL_TEXT_TYPES.toSet())), - createIncompatibleTypesForExprError( - SourceLocationMeta(5L, 34L, 5L), - expectedType = BOOL, - actualType = unionOf(ALL_LOB_TYPES.toSet())), - createReturnsNullOrMissingError(SourceLocationMeta(6L, 34L, 17L)) + globals = mapOf( + "t_numeric" to unionOf(ALL_NUMERIC_TYPES.toSet()), + "t_text" to unionOf(ALL_TEXT_TYPES.toSet()), + "t_lob" to unionOf(ALL_LOB_TYPES.toSet()), + "t_null_or_missing" to NULL_OR_MISSING + ), + handler = expectSemanticProblems( + listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(3L, 34L, 9L), + expectedType = BOOL, + actualType = unionOf(ALL_NUMERIC_TYPES.toSet()) + ), + createIncompatibleTypesForExprError( + SourceLocationMeta(4L, 34L, 6L), + expectedType = BOOL, + actualType = unionOf(ALL_TEXT_TYPES.toSet()) + ), + createIncompatibleTypesForExprError( + SourceLocationMeta(5L, 34L, 5L), + expectedType = BOOL, + actualType = unionOf(ALL_LOB_TYPES.toSet()) + ), + createReturnsNullOrMissingError(SourceLocationMeta(6L, 34L, 17L)) + ) ) ) ) - ) /** * Creates a SimpleCaseWhen and SearchedCaseWhen clause [TestCase] for testing the inferred static type of the @@ -3655,7 +3777,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { name: String, thenTypes: List, elseType: StaticType? = null, - expectedType: StaticType): List { + expectedType: StaticType + ): List { val globals = mutableMapOf() var simpleCaseWhenQuery = "CASE 0\n" var searchedCaseWhenQuery = "CASE\n" @@ -3680,12 +3803,15 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { name = "SimpleCaseWhen $name", originalSql = simpleCaseWhenQuery, globals = globals, - handler = expectQueryOutputType(expectedType)), + handler = expectQueryOutputType(expectedType) + ), TestCase( name = "SearchedCaseWhen $name", originalSql = searchedCaseWhenQuery, globals = globals, - handler = expectQueryOutputType(expectedType))) + handler = expectQueryOutputType(expectedType) + ) + ) } @JvmStatic @@ -3790,8 +3916,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "lhs IN rhs", globals = mapOf( "lhs" to leftType, - "rhs" to rightType), - handler = expectQueryOutputType(outputType)) + "rhs" to rightType + ), + handler = expectQueryOutputType(outputType) + ) /** * Creates a test for each [CollectionType] of the form: [leftType] IN collection([rightElementType]). 
Each test @@ -3808,22 +3936,28 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "lhs IN rhs", globals = mapOf( "lhs" to leftType, - "rhs" to ListType(elementType = rightElementType)), - handler = expectQueryOutputType(BOOL)), + "rhs" to ListType(elementType = rightElementType) + ), + handler = expectQueryOutputType(BOOL) + ), TestCase( name = "NAry op IN - $leftType IN bag($rightElementType)", originalSql = "lhs IN rhs", globals = mapOf( "lhs" to leftType, - "rhs" to BagType(elementType = rightElementType)), - handler = expectQueryOutputType(BOOL)), + "rhs" to BagType(elementType = rightElementType) + ), + handler = expectQueryOutputType(BOOL) + ), TestCase( name = "NAry op IN - $leftType IN sexp($rightElementType)", originalSql = "lhs IN rhs", globals = mapOf( "lhs" to leftType, - "rhs" to SexpType(elementType = rightElementType)), - handler = expectQueryOutputType(BOOL)), + "rhs" to SexpType(elementType = rightElementType) + ), + handler = expectQueryOutputType(BOOL) + ), // row-value constructor test TestCase( name = "NAry op IN - $leftType IN ($rightElementType)", @@ -3851,8 +3985,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { originalSql = "lhs IN rhs", globals = mapOf( "lhs" to leftType, - "rhs" to rightType), - handler = expectSemanticProblems(expectedProblems)) + "rhs" to rightType + ), + handler = expectSemanticProblems(expectedProblems) + ) /** * Creates a test for each [CollectionType] expecting a data type mismatch error due to an incomparable @@ -3903,273 +4039,274 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { rightElementType = it.second ) } + - generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).flatMap { - createNAryOpInAllCollectionsTest( - leftType = it.first, - rightElementType = it.second - ) - } + - generateAllUniquePairs(ALL_LOB_TYPES, ALL_LOB_TYPES).flatMap { - createNAryOpInAllCollectionsTest( - leftType = it.first, - rightElementType = it.second - ) - } + - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { - createNAryOpInAllCollectionsTest( - leftType = it, - rightElementType = it - ) - } + - listOf( - createNAryOpInTest( - name = "STRING IN unionOf(STRING, INT) LIST", - leftType = STRING, - rightType = ListType(elementType = unionOf(STRING, INT)), - outputType = BOOL - ), - createNAryOpInTest( - name = "STRING IN unionOf(STRING, NULL) LIST", - leftType = STRING, - rightType = ListType(elementType = unionOf(STRING, NULL)), - outputType = unionOf(BOOL, NULL) - ), - createNAryOpInTest( - name = "STRING IN unionOf(STRING, MISSING) LIST", - leftType = STRING, - rightType = ListType(elementType = unionOf(STRING, MISSING)), - outputType = unionOf(BOOL, MISSING) - ), - createNAryOpInTest( - name = "STRING IN unionOf(STRING, MISSING, NULL) LIST", - leftType = STRING, - rightType = ListType(elementType = unionOf(STRING, MISSING, NULL)), - outputType = unionOf(BOOL, MISSING, NULL) - ), - createNAryOpInTest( - name = "STRING IN ANY LIST", - leftType = STRING, - rightType = LIST, - outputType = unionOf(BOOL, MISSING, NULL) - ), - createNAryOpInTest( - name = "STRING IN ANY SEXP", - leftType = STRING, - rightType = SEXP, - outputType = unionOf(BOOL, MISSING, NULL) - ), - createNAryOpInTest( - name = "STRING IN ANY BAG", - leftType = STRING, - rightType = BAG, - outputType = unionOf(BOOL, MISSING, NULL) - ), - createNAryOpInTest( - name = "ANY IN ANY BAG", - leftType = ANY, - rightType = BAG, - outputType = unionOf(BOOL, MISSING, NULL) - ), - createNAryOpInTest( - 
name = "ANY IN ANY", - leftType = ANY, - rightType = ANY, - outputType = unionOf(BOOL, MISSING, NULL) - ), - createNAryOpInTest( - name = "ANY IN unionOf(ANY BAG, empty STRUCT)", - leftType = ANY, - rightType = unionOf(BAG, STRUCT), - outputType = unionOf(BOOL, NULL, MISSING) - ), - createNAryOpInTest( - name = "ANY IN unionOf(ANY BAG, ANY LIST)", - leftType = ANY, - rightType = unionOf(BAG, LIST), - outputType = unionOf(BOOL, NULL, MISSING) - ), - createNAryOpInTest( - name = "STRING LIST IN STRING LIST LIST", - leftType = ListType(elementType = STRING), - rightType = ListType(elementType = ListType(elementType = STRING)), - outputType = BOOL - ), - createNAryOpInTest( - name = "STRING LIST IN unionOf(STRING LIST LIST, STRING BAG BAG)", - leftType = ListType(elementType = STRING), - rightType = unionOf( - ListType(elementType = ListType(elementType = STRING)), - BagType(elementType = BagType(elementType = STRING))), - outputType = BOOL - ), - createNAryOpInTest( - name = "STRING LIST IN unionOf(STRING LIST LIST, MISSING)", - leftType = ListType(elementType = STRING), - rightType = unionOf(ListType(elementType = ListType(elementType = STRING)), MISSING), - outputType = unionOf(BOOL, MISSING) - ), - createNAryOpInTest( - name = "STRING LIST IN unionOf(STRING LIST LIST, NULL)", - leftType = ListType(elementType = STRING), - rightType = unionOf(ListType(elementType = ListType(elementType = STRING)), NULL), - outputType = unionOf(BOOL, NULL) - ), - createNAryOpInTest( - name = "STRING LIST IN unionOf(STRING LIST LIST, MISSING, NULL)", - leftType = ListType(elementType = STRING), - rightType = unionOf(ListType(elementType = ListType(elementType = STRING)), MISSING, NULL), - outputType = unionOf(BOOL, MISSING, NULL) - ), - // row-value constructor tests - TestCase( - name = "NAry op IN - IN (, , )", - originalSql = "intT IN (intT, nullT, intT)", - globals = mapOf( - "intT" to INT, - "nullT" to NULL + generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).flatMap { + createNAryOpInAllCollectionsTest( + leftType = it.first, + rightElementType = it.second + ) + } + + generateAllUniquePairs(ALL_LOB_TYPES, ALL_LOB_TYPES).flatMap { + createNAryOpInAllCollectionsTest( + leftType = it.first, + rightElementType = it.second + ) + } + + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { + createNAryOpInAllCollectionsTest( + leftType = it, + rightElementType = it + ) + } + + listOf( + createNAryOpInTest( + name = "STRING IN unionOf(STRING, INT) LIST", + leftType = STRING, + rightType = ListType(elementType = unionOf(STRING, INT)), + outputType = BOOL ), - handler = expectQueryOutputType(unionOf(BOOL, NULL)) - ), - TestCase( - name = "NAry op IN - IN (, , )", - originalSql = "intT IN (intT, missingT, intT)", - globals = mapOf( - "intT" to INT, - "missingT" to MISSING + createNAryOpInTest( + name = "STRING IN unionOf(STRING, NULL) LIST", + leftType = STRING, + rightType = ListType(elementType = unionOf(STRING, NULL)), + outputType = unionOf(BOOL, NULL) ), - handler = expectQueryOutputType(unionOf(BOOL, MISSING)) - ), - TestCase( - name = "NAry op IN - IN (, , )", - originalSql = "intT IN (intT, missingT, nullT)", - globals = mapOf( - "intT" to INT, - "missingT" to MISSING, - "nullT" to NULL + createNAryOpInTest( + name = "STRING IN unionOf(STRING, MISSING) LIST", + leftType = STRING, + rightType = ListType(elementType = unionOf(STRING, MISSING)), + outputType = unionOf(BOOL, MISSING) ), - handler = expectQueryOutputType(unionOf(BOOL, MISSING, NULL)) - ), - TestCase( - name = "NAry op IN - IN (, , )", - originalSql = 
"intT IN (intT, nullOrMissingT, intT)", - globals = mapOf( - "intT" to INT, - "nullOrMissingT" to NULL_OR_MISSING + createNAryOpInTest( + name = "STRING IN unionOf(STRING, MISSING, NULL) LIST", + leftType = STRING, + rightType = ListType(elementType = unionOf(STRING, MISSING, NULL)), + outputType = unionOf(BOOL, MISSING, NULL) ), - handler = expectQueryOutputType(unionOf(BOOL, MISSING, NULL)) - ) - ) + - // - // `IN` cases with an error - // - // non-unknown IN non-collection (non-unknown) -> data type mismatch - ALL_NON_UNKNOWN_TYPES.flatMap { nonUnknown -> - ALL_NON_COLLECTION_NON_UNKNOWN_TYPES.map { nonCollection -> - createNAryOpInErrorTest( - name = "$nonUnknown IN $nonCollection - data type mismatch", - leftType = nonUnknown, - rightType = nonCollection, - expectedProblems = listOf( - createDataTypeMismatchError(col = 5, argTypes = listOf(nonUnknown, nonCollection), nAryOp = "IN") - ) - ) - } - } + - // unknown IN non-collection (non-unknown) -> data type mismatch and unknown operand error - ALL_UNKNOWN_TYPES.flatMap { unknown -> - ALL_NON_COLLECTION_NON_UNKNOWN_TYPES.map { nonCollection -> - createNAryOpInErrorTest( - name = "$unknown IN $nonCollection - data type mismatch, unknown error", - leftType = unknown, - rightType = nonCollection, - expectedProblems = listOf( - createDataTypeMismatchError(col = 5, argTypes = listOf(unknown, nonCollection), nAryOp = "IN"), - createReturnsNullOrMissingError(col = 5, nAryOp = "IN") - ) + createNAryOpInTest( + name = "STRING IN ANY LIST", + leftType = STRING, + rightType = LIST, + outputType = unionOf(BOOL, MISSING, NULL) + ), + createNAryOpInTest( + name = "STRING IN ANY SEXP", + leftType = STRING, + rightType = SEXP, + outputType = unionOf(BOOL, MISSING, NULL) + ), + createNAryOpInTest( + name = "STRING IN ANY BAG", + leftType = STRING, + rightType = BAG, + outputType = unionOf(BOOL, MISSING, NULL) + ), + createNAryOpInTest( + name = "ANY IN ANY BAG", + leftType = ANY, + rightType = BAG, + outputType = unionOf(BOOL, MISSING, NULL) + ), + createNAryOpInTest( + name = "ANY IN ANY", + leftType = ANY, + rightType = ANY, + outputType = unionOf(BOOL, MISSING, NULL) + ), + createNAryOpInTest( + name = "ANY IN unionOf(ANY BAG, empty STRUCT)", + leftType = ANY, + rightType = unionOf(BAG, STRUCT), + outputType = unionOf(BOOL, NULL, MISSING) + ), + createNAryOpInTest( + name = "ANY IN unionOf(ANY BAG, ANY LIST)", + leftType = ANY, + rightType = unionOf(BAG, LIST), + outputType = unionOf(BOOL, NULL, MISSING) + ), + createNAryOpInTest( + name = "STRING LIST IN STRING LIST LIST", + leftType = ListType(elementType = STRING), + rightType = ListType(elementType = ListType(elementType = STRING)), + outputType = BOOL + ), + createNAryOpInTest( + name = "STRING LIST IN unionOf(STRING LIST LIST, STRING BAG BAG)", + leftType = ListType(elementType = STRING), + rightType = unionOf( + ListType(elementType = ListType(elementType = STRING)), + BagType(elementType = BagType(elementType = STRING)) + ), + outputType = BOOL + ), + createNAryOpInTest( + name = "STRING LIST IN unionOf(STRING LIST LIST, MISSING)", + leftType = ListType(elementType = STRING), + rightType = unionOf(ListType(elementType = ListType(elementType = STRING)), MISSING), + outputType = unionOf(BOOL, MISSING) + ), + createNAryOpInTest( + name = "STRING LIST IN unionOf(STRING LIST LIST, NULL)", + leftType = ListType(elementType = STRING), + rightType = unionOf(ListType(elementType = ListType(elementType = STRING)), NULL), + outputType = unionOf(BOOL, NULL) + ), + createNAryOpInTest( + name = "STRING 
LIST IN unionOf(STRING LIST LIST, MISSING, NULL)", + leftType = ListType(elementType = STRING), + rightType = unionOf(ListType(elementType = ListType(elementType = STRING)), MISSING, NULL), + outputType = unionOf(BOOL, MISSING, NULL) + ), + // row-value constructor tests + TestCase( + name = "NAry op IN - IN (, , )", + originalSql = "intT IN (intT, nullT, intT)", + globals = mapOf( + "intT" to INT, + "nullT" to NULL + ), + handler = expectQueryOutputType(unionOf(BOOL, NULL)) + ), + TestCase( + name = "NAry op IN - IN (, , )", + originalSql = "intT IN (intT, missingT, intT)", + globals = mapOf( + "intT" to INT, + "missingT" to MISSING + ), + handler = expectQueryOutputType(unionOf(BOOL, MISSING)) + ), + TestCase( + name = "NAry op IN - IN (, , )", + originalSql = "intT IN (intT, missingT, nullT)", + globals = mapOf( + "intT" to INT, + "missingT" to MISSING, + "nullT" to NULL + ), + handler = expectQueryOutputType(unionOf(BOOL, MISSING, NULL)) + ), + TestCase( + name = "NAry op IN - IN (, , )", + originalSql = "intT IN (intT, nullOrMissingT, intT)", + globals = mapOf( + "intT" to INT, + "nullOrMissingT" to NULL_OR_MISSING + ), + handler = expectQueryOutputType(unionOf(BOOL, MISSING, NULL)) ) - } - } + - // numeric IN collection(non-numeric) -> data type mismatch - createNAryOpInErrorIncomparableElementTests(ALL_NUMERIC_TYPES, ALL_NON_NUMERIC_NON_UNKNOWN_TYPES) + - // text IN collection(non-text) -> data type mismatch - createNAryOpInErrorIncomparableElementTests(ALL_TEXT_TYPES, ALL_NON_TEXT_NON_UNKNOWN_TYPES) + - // lob IN collection(non-lob) -> data type mismatch - createNAryOpInErrorIncomparableElementTests(ALL_LOB_TYPES, ALL_NON_LOB_NON_UNKNOWN_TYPES) + - // type-only-comparable-to-self IN collection(other type) -> data type mismatch - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { type -> - createNAryOpInErrorIncomparableElementTests(listOf(type), ALL_NON_UNKNOWN_TYPES.filter { it != type }) - } + - // unknown IN collection(type) -> unknown operand error - ALL_UNKNOWN_TYPES.flatMap { unknownType -> - ALL_TYPES.flatMap { type -> - listOf( + ) + + // + // `IN` cases with an error + // + // non-unknown IN non-collection (non-unknown) -> data type mismatch + ALL_NON_UNKNOWN_TYPES.flatMap { nonUnknown -> + ALL_NON_COLLECTION_NON_UNKNOWN_TYPES.map { nonCollection -> createNAryOpInErrorTest( - name = "$unknownType IN list($type) - unknown error", - leftType = unknownType, - rightType = ListType(elementType = type), + name = "$nonUnknown IN $nonCollection - data type mismatch", + leftType = nonUnknown, + rightType = nonCollection, expectedProblems = listOf( - createReturnsNullOrMissingError(col = 5, nAryOp = "IN") + createDataTypeMismatchError(col = 5, argTypes = listOf(nonUnknown, nonCollection), nAryOp = "IN") ) - ), + ) + } + } + + // unknown IN non-collection (non-unknown) -> data type mismatch and unknown operand error + ALL_UNKNOWN_TYPES.flatMap { unknown -> + ALL_NON_COLLECTION_NON_UNKNOWN_TYPES.map { nonCollection -> createNAryOpInErrorTest( - name = "$unknownType IN bag($type) - unknown error", - leftType = unknownType, - rightType = BagType(elementType = type), + name = "$unknown IN $nonCollection - data type mismatch, unknown error", + leftType = unknown, + rightType = nonCollection, expectedProblems = listOf( + createDataTypeMismatchError(col = 5, argTypes = listOf(unknown, nonCollection), nAryOp = "IN"), createReturnsNullOrMissingError(col = 5, nAryOp = "IN") ) - ), + ) + } + } + + // numeric IN collection(non-numeric) -> data type mismatch + 
createNAryOpInErrorIncomparableElementTests(ALL_NUMERIC_TYPES, ALL_NON_NUMERIC_NON_UNKNOWN_TYPES) + + // text IN collection(non-text) -> data type mismatch + createNAryOpInErrorIncomparableElementTests(ALL_TEXT_TYPES, ALL_NON_TEXT_NON_UNKNOWN_TYPES) + + // lob IN collection(non-lob) -> data type mismatch + createNAryOpInErrorIncomparableElementTests(ALL_LOB_TYPES, ALL_NON_LOB_NON_UNKNOWN_TYPES) + + // type-only-comparable-to-self IN collection(other type) -> data type mismatch + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { type -> + createNAryOpInErrorIncomparableElementTests(listOf(type), ALL_NON_UNKNOWN_TYPES.filter { it != type }) + } + + // unknown IN collection(type) -> unknown operand error + ALL_UNKNOWN_TYPES.flatMap { unknownType -> + ALL_TYPES.flatMap { type -> + listOf( + createNAryOpInErrorTest( + name = "$unknownType IN list($type) - unknown error", + leftType = unknownType, + rightType = ListType(elementType = type), + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 5, nAryOp = "IN") + ) + ), + createNAryOpInErrorTest( + name = "$unknownType IN bag($type) - unknown error", + leftType = unknownType, + rightType = BagType(elementType = type), + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 5, nAryOp = "IN") + ) + ), + createNAryOpInErrorTest( + name = "$unknownType IN sexp($type) - unknown error", + leftType = unknownType, + rightType = SexpType(elementType = type), + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 5, nAryOp = "IN") + ) + ) + ) + } + } + + // type IN unknown -> unknown operand error + ALL_TYPES.flatMap { type -> + ALL_UNKNOWN_TYPES.map { unknownType -> createNAryOpInErrorTest( - name = "$unknownType IN sexp($type) - unknown error", - leftType = unknownType, - rightType = SexpType(elementType = type), + name = "$type IN $unknownType - unknown error", + leftType = type, + rightType = unknownType, expectedProblems = listOf( createReturnsNullOrMissingError(col = 5, nAryOp = "IN") ) ) - ) - } - } + - // type IN unknown -> unknown operand error - ALL_TYPES.flatMap { type -> - ALL_UNKNOWN_TYPES.map { unknownType -> + } + } + + // other tests resulting in an error + listOf( + createNAryOpInErrorTest( + name = "ANY IN INT - data type mismatch", + leftType = ANY, + rightType = INT, + expectedProblems = listOf( + createDataTypeMismatchError(col = 5, argTypes = listOf(ANY, INT), nAryOp = "IN") + ) + ), + createNAryOpInErrorTest( + name = "ANY IN unionOf(INT, empty struct) - data type mismatch", + leftType = ANY, + rightType = unionOf(INT, STRUCT), + expectedProblems = listOf( + createDataTypeMismatchError(col = 5, argTypes = listOf(ANY, unionOf(INT, STRUCT)), nAryOp = "IN") + ) + ), createNAryOpInErrorTest( - name = "$type IN $unknownType - unknown error", - leftType = type, - rightType = unknownType, + name = "ANY IN NULL - unknown error", + leftType = ANY, + rightType = NULL, expectedProblems = listOf( createReturnsNullOrMissingError(col = 5, nAryOp = "IN") ) ) - } - } + - // other tests resulting in an error - listOf( - createNAryOpInErrorTest( - name = "ANY IN INT - data type mismatch", - leftType = ANY, - rightType = INT, - expectedProblems = listOf( - createDataTypeMismatchError(col = 5, argTypes = listOf(ANY, INT), nAryOp = "IN") - ) - ), - createNAryOpInErrorTest( - name = "ANY IN unionOf(INT, empty struct) - data type mismatch", - leftType = ANY, - rightType = unionOf(INT, STRUCT), - expectedProblems = listOf( - createDataTypeMismatchError(col = 5, argTypes = listOf(ANY, unionOf(INT, STRUCT)), nAryOp = "IN") 
- ) - ), - createNAryOpInErrorTest( - name = "ANY IN NULL - unknown error", - leftType = ANY, - rightType = NULL, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 5, nAryOp = "IN") - ) ) - ) /** * Creates a test of the form: NULLIF([leftType], [rightType]) and expects an output type of [leftType] with @@ -4218,179 +4355,178 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { rightType = it.second ) } + - // NULLIF(, ) - generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).map { - createValidNullIfTest( - leftType = it.first, - rightType = it.second - ) - } + - // NULLIF(, ) - generateAllUniquePairs(ALL_LOB_TYPES, ALL_LOB_TYPES).map { - createValidNullIfTest( - leftType = it.first, - rightType = it.second - ) - } + - // `NULLIF` with types only comparable to self - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.map { - createValidNullIfTest( - leftType = it, - rightType = it - ) - } + - // other valid `NULLIF` tests - listOf( - createValidNullIfTest( - leftType = ANY, - rightType = STRING - ), - createValidNullIfTest( - leftType = unionOf(STRING, INT), - rightType = STRING - ), - createValidNullIfTest( - leftType = unionOf(STRING, INT), - rightType = unionOf(INT8, FLOAT, SYMBOL) - ), - createValidNullIfTest( - leftType = INT.asNullable(), - rightType = INT.asOptional() - ), - createValidNullIfTest( - leftType = INT.asNullable(), - rightType = FLOAT.asOptional() - ) - ) + - // - // `NULLIF` error cases below - // - - // NULLIF with a numeric and non-numeric, non-unknown -> data type mismatch - ALL_NUMERIC_TYPES.flatMap { numericType -> - ALL_NON_NUMERIC_NON_UNKNOWN_TYPES.map { nonNumericType -> - createErrorNullIfTest( - name = "data type mismatch - NULLIF($numericType, $nonNumericType)", - leftType = numericType, - rightType = nonNumericType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(numericType, nonNumericType), nAryOp = "NULLIF") - ) - ) - } - } + - // NULLIF with a text and non-text, non-unknown -> data type mismatch - ALL_TEXT_TYPES.flatMap { textType -> - ALL_NON_TEXT_NON_UNKNOWN_TYPES.map { nonTextType -> - createErrorNullIfTest( - name = "data type mismatch - NULLIF($textType, $nonTextType)", - leftType = textType, - rightType = nonTextType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(textType, nonTextType), nAryOp = "NULLIF") - ) + // NULLIF(, ) + generateAllUniquePairs(ALL_TEXT_TYPES, ALL_TEXT_TYPES).map { + createValidNullIfTest( + leftType = it.first, + rightType = it.second ) - } - } + - // NULLIF with a lob and non-lob, non-unknown -> data type mismatch - ALL_LOB_TYPES.flatMap { lobType -> - ALL_NON_LOB_NON_UNKNOWN_TYPES.map { nonLobType -> - createErrorNullIfTest( - name = "data type mismatch - NULLIF($lobType, $nonLobType)", - leftType = lobType, - rightType = nonLobType, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(lobType, nonLobType), nAryOp = "NULLIF") - ) + } + + // NULLIF(, ) + generateAllUniquePairs(ALL_LOB_TYPES, ALL_LOB_TYPES).map { + createValidNullIfTest( + leftType = it.first, + rightType = it.second ) - } - } + - // NULLIF with a type only comparable to itself and other non-unknown type -> data type mismatch - ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { type -> - ALL_NON_UNKNOWN_TYPES.filter { it != type }.map { incomparableToType -> - createErrorNullIfTest( - name = "data type mismatch - NULLIF($type, $incomparableToType)", - leftType = type, - rightType = incomparableToType, - expectedProblems = 
listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(type, incomparableToType), nAryOp = "NULLIF") - ) + } + + // `NULLIF` with types only comparable to self + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.map { + createValidNullIfTest( + leftType = it, + rightType = it ) - } - } + - // NULLIF with a type and unknown -> null or missing error - generateAllUniquePairs(ALL_TYPES, ALL_UNKNOWN_TYPES).map { - createErrorNullIfTest( - name = "null or missing error - ${it.first}, ${it.second}", - leftType = it.first, - rightType = it.second, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF")) - ) - } + - // other miscellaneous error tests - listOf( - createErrorNullIfTest( - name = "data type mismatch - NULLIF(nullable int, string)", - leftType = INT.asNullable(), - rightType = STRING, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(INT.asNullable(), STRING), nAryOp = "NULLIF") - ) - ), - createErrorNullIfTest( - name = "data type mismatch - NULLIF(optional int, string)", - leftType = INT.asOptional(), - rightType = STRING, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(INT.asOptional(), STRING), nAryOp = "NULLIF") - ) - ), - createErrorNullIfTest( - name = "data type mismatch - NULLIF(nullable int, nullable string)", - leftType = INT.asNullable(), - rightType = STRING.asNullable(), - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(INT.asNullable(), STRING.asNullable()), nAryOp = "NULLIF") - ) - ), - createErrorNullIfTest( - name = "data type mismatch - NULLIF(union(string, int), bool)", - leftType = unionOf(STRING, INT), - rightType = BOOL, - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(STRING, INT), BOOL), nAryOp = "NULLIF") - ) - ), - createErrorNullIfTest( - name = "data type mismatch - NULLIF(union(string, int), union(bag, list))", - leftType = unionOf(STRING, INT), - rightType = unionOf(BAG, LIST), - expectedProblems = listOf( - createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(STRING, INT), unionOf(BAG, LIST)), nAryOp = "NULLIF") + } + + // other valid `NULLIF` tests + listOf( + createValidNullIfTest( + leftType = ANY, + rightType = STRING + ), + createValidNullIfTest( + leftType = unionOf(STRING, INT), + rightType = STRING + ), + createValidNullIfTest( + leftType = unionOf(STRING, INT), + rightType = unionOf(INT8, FLOAT, SYMBOL) + ), + createValidNullIfTest( + leftType = INT.asNullable(), + rightType = INT.asOptional() + ), + createValidNullIfTest( + leftType = INT.asNullable(), + rightType = FLOAT.asOptional() ) - ), - createErrorNullIfTest( - name = "null or missing error - NULLIF(missing, optional int)", - leftType = MISSING, - rightType = INT.asOptional(), - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF") + ) + + // + // `NULLIF` error cases below + // + + // NULLIF with a numeric and non-numeric, non-unknown -> data type mismatch + ALL_NUMERIC_TYPES.flatMap { numericType -> + ALL_NON_NUMERIC_NON_UNKNOWN_TYPES.map { nonNumericType -> + createErrorNullIfTest( + name = "data type mismatch - NULLIF($numericType, $nonNumericType)", + leftType = numericType, + rightType = nonNumericType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(numericType, nonNumericType), nAryOp = "NULLIF") + ) + ) + } + } + + // NULLIF with a text and non-text, non-unknown -> data type mismatch + ALL_TEXT_TYPES.flatMap { textType 
-> + ALL_NON_TEXT_NON_UNKNOWN_TYPES.map { nonTextType -> + createErrorNullIfTest( + name = "data type mismatch - NULLIF($textType, $nonTextType)", + leftType = textType, + rightType = nonTextType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(textType, nonTextType), nAryOp = "NULLIF") + ) + ) + } + } + + // NULLIF with a lob and non-lob, non-unknown -> data type mismatch + ALL_LOB_TYPES.flatMap { lobType -> + ALL_NON_LOB_NON_UNKNOWN_TYPES.map { nonLobType -> + createErrorNullIfTest( + name = "data type mismatch - NULLIF($lobType, $nonLobType)", + leftType = lobType, + rightType = nonLobType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(lobType, nonLobType), nAryOp = "NULLIF") + ) + ) + } + } + + // NULLIF with a type only comparable to itself and other non-unknown type -> data type mismatch + ALL_TYPES_ONLY_COMPARABLE_TO_SELF.flatMap { type -> + ALL_NON_UNKNOWN_TYPES.filter { it != type }.map { incomparableToType -> + createErrorNullIfTest( + name = "data type mismatch - NULLIF($type, $incomparableToType)", + leftType = type, + rightType = incomparableToType, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(type, incomparableToType), nAryOp = "NULLIF") + ) + ) + } + } + + // NULLIF with a type and unknown -> null or missing error + generateAllUniquePairs(ALL_TYPES, ALL_UNKNOWN_TYPES).map { + createErrorNullIfTest( + name = "null or missing error - ${it.first}, ${it.second}", + leftType = it.first, + rightType = it.second, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF")) ) - ), - createErrorNullIfTest( - name = "null or missing error - NULLIF(any, null or missing)", - leftType = ANY, - rightType = NULL_OR_MISSING, - expectedProblems = listOf( - createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF") + } + + // other miscellaneous error tests + listOf( + createErrorNullIfTest( + name = "data type mismatch - NULLIF(nullable int, string)", + leftType = INT.asNullable(), + rightType = STRING, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(INT.asNullable(), STRING), nAryOp = "NULLIF") + ) + ), + createErrorNullIfTest( + name = "data type mismatch - NULLIF(optional int, string)", + leftType = INT.asOptional(), + rightType = STRING, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(INT.asOptional(), STRING), nAryOp = "NULLIF") + ) + ), + createErrorNullIfTest( + name = "data type mismatch - NULLIF(nullable int, nullable string)", + leftType = INT.asNullable(), + rightType = STRING.asNullable(), + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(INT.asNullable(), STRING.asNullable()), nAryOp = "NULLIF") + ) + ), + createErrorNullIfTest( + name = "data type mismatch - NULLIF(union(string, int), bool)", + leftType = unionOf(STRING, INT), + rightType = BOOL, + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(STRING, INT), BOOL), nAryOp = "NULLIF") + ) + ), + createErrorNullIfTest( + name = "data type mismatch - NULLIF(union(string, int), union(bag, list))", + leftType = unionOf(STRING, INT), + rightType = unionOf(BAG, LIST), + expectedProblems = listOf( + createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(STRING, INT), unionOf(BAG, LIST)), nAryOp = "NULLIF") + ) + ), + createErrorNullIfTest( + name = "null or missing error - NULLIF(missing, optional int)", + leftType = MISSING, + 
rightType = INT.asOptional(), + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF") + ) + ), + createErrorNullIfTest( + name = "null or missing error - NULLIF(any, null or missing)", + leftType = ANY, + rightType = NULL_OR_MISSING, + expectedProblems = listOf( + createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF") + ) ) ) - ) - @JvmStatic @Suppress("unused") - fun parametersForStructTests () = listOf( + fun parametersForStructTests() = listOf( TestCase( "struct -- no fields", "{ }", @@ -4404,7 +4540,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { mapOf( "a" to INT, "b" to BOOL, - "c" to STRING), + "c" to STRING + ), contentClosed = true ) ) @@ -4579,13 +4716,16 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "a_string" to StringType( StringType.StringLengthConstraint.Constrained( NumberConstraint.UpTo(10) - )) + ) + ) ), handler = expectQueryOutputType( StringType( StringType.StringLengthConstraint.Constrained( NumberConstraint.UpTo(10) - ))) + ) + ) + ) ), TestCase( name = "CAST to CHAR", @@ -4594,13 +4734,16 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "a_string" to StringType( StringType.StringLengthConstraint.Constrained( NumberConstraint.Equals(1) - )) + ) + ) ), handler = expectQueryOutputType( StringType( StringType.StringLengthConstraint.Constrained( NumberConstraint.Equals(1) - ))) + ) + ) + ) ), TestCase( name = "CAST to CHAR(x)", @@ -4609,13 +4752,16 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "a_string" to StringType( StringType.StringLengthConstraint.Constrained( NumberConstraint.Equals(10) - )) + ) + ) ), handler = expectQueryOutputType( StringType( StringType.StringLengthConstraint.Constrained( NumberConstraint.Equals(10) - ))) + ) + ) + ) ), TestCase( name = "CAST to DECIMAL", @@ -4760,7 +4906,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { globals = mapOf("a" to STRING), handler = expectSemanticProblems( expectedProblems = listOf( - Problem(SourceLocationMeta(1L, 1L, 4L), + Problem( + SourceLocationMeta(1L, 1L, 4L), SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( functionName = "size", expectedArity = 1..1, @@ -4781,7 +4928,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ), handler = expectSemanticProblems( expectedProblems = listOf( - Problem(SourceLocationMeta(1L, 1L, 4L), + Problem( + SourceLocationMeta(1L, 1L, 4L), SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( functionName = "size", expectedArity = 1..1, @@ -4942,466 +5090,466 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { op = arithmeticOp ) } + - // concat will return string in the event of an error - createBinaryOpContinuationTypeTest( - goodType = STRING, - badType = INT, - expectedContinuationType = STRING, - op = "||" - ) + - // LIKE will return bool in the event of an error - createBinaryOpContinuationTypeTest( - goodType = STRING, - badType = INT, - expectedContinuationType = BOOL, - op = "LIKE" - ) + - // logical ops will return bool in the event of an error - OpType.LOGICAL.operators.flatMap { logicalOp -> - createBinaryOpContinuationTypeTest( - goodType = BOOL, - badType = STRING, - expectedContinuationType = BOOL, - op = logicalOp - ) - } + - // comparison ops will return bool in the event of an error - OpType.COMPARISON.operators.flatMap { logicalOp -> + // concat will return string in the 
event of an error createBinaryOpContinuationTypeTest( - goodType = INT, - badType = STRING, - expectedContinuationType = BOOL, - op = logicalOp - ) - } + - // equality ops will return bool in the event of an error - OpType.EQUALITY.operators.flatMap { logicalOp -> + goodType = STRING, + badType = INT, + expectedContinuationType = STRING, + op = "||" + ) + + // LIKE will return bool in the event of an error createBinaryOpContinuationTypeTest( - goodType = INT, - badType = STRING, + goodType = STRING, + badType = INT, expectedContinuationType = BOOL, - op = logicalOp - ) - } + - // unary arithmetic op tests - continuation type of numeric - listOf("+", "-").flatMap { op -> + op = "LIKE" + ) + + // logical ops will return bool in the event of an error + OpType.LOGICAL.operators.flatMap { logicalOp -> + createBinaryOpContinuationTypeTest( + goodType = BOOL, + badType = STRING, + expectedContinuationType = BOOL, + op = logicalOp + ) + } + + // comparison ops will return bool in the event of an error + OpType.COMPARISON.operators.flatMap { logicalOp -> + createBinaryOpContinuationTypeTest( + goodType = INT, + badType = STRING, + expectedContinuationType = BOOL, + op = logicalOp + ) + } + + // equality ops will return bool in the event of an error + OpType.EQUALITY.operators.flatMap { logicalOp -> + createBinaryOpContinuationTypeTest( + goodType = INT, + badType = STRING, + expectedContinuationType = BOOL, + op = logicalOp + ) + } + + // unary arithmetic op tests - continuation type of numeric + listOf("+", "-").flatMap { op -> + listOf( + TestCase( + name = "data type mismatch error: $op string -> union of numerics", + originalSql = "$op badT", + globals = mapOf("badT" to STRING), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(ALL_NUMERIC_TYPES.toSet()), + expectedProblems = listOf(createDataTypeMismatchError(col = 1, argTypes = listOf(STRING), nAryOp = op)) + ) + ), + TestCase( + name = "null or missing error: $op string -> union of numerics", + originalSql = "$op nullT", + globals = mapOf("nullT" to NULL), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(ALL_NUMERIC_TYPES.toSet()), + expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = op)) + ) + ) + ) + } + + // LIKE tests with bad ESCAPE type - continuation type of bool listOf( TestCase( - name = "data type mismatch error: $op string -> union of numerics", - originalSql = "$op badT", - globals = mapOf("badT" to STRING), + name = "data type mismatch error: string LIKE string ESCAPE int -> bool", + originalSql = "goodT LIKE goodT ESCAPE badT", + globals = mapOf( + "goodT" to STRING, + "badT" to INT + ), handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(ALL_NUMERIC_TYPES.toSet()), - expectedProblems = listOf(createDataTypeMismatchError(col = 1, argTypes = listOf(STRING), nAryOp = op)) + expectedType = BOOL, + expectedProblems = listOf(createDataTypeMismatchError(col = 7, argTypes = listOf(STRING, STRING, INT), nAryOp = "LIKE")) ) ), TestCase( - name = "null or missing error: $op string -> union of numerics", - originalSql = "$op nullT", - globals = mapOf("nullT" to NULL), + name = "null or missing error: string LIKE string ESCAPE null -> bool", + originalSql = "goodT LIKE goodT ESCAPE badT", + globals = mapOf( + "goodT" to STRING, + "badT" to NULL + ), handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(ALL_NUMERIC_TYPES.toSet()), - expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = op)) + expectedType = BOOL, + 
expectedProblems = listOf(createReturnsNullOrMissingError(col = 7, nAryOp = "LIKE")) ) - ) - ) - } + - // LIKE tests with bad ESCAPE type - continuation type of bool - listOf( - TestCase( - name = "data type mismatch error: string LIKE string ESCAPE int -> bool", - originalSql = "goodT LIKE goodT ESCAPE badT", - globals = mapOf( - "goodT" to STRING, - "badT" to INT - ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createDataTypeMismatchError(col = 7, argTypes = listOf(STRING, STRING, INT), nAryOp = "LIKE")) - ) - ), - TestCase( - name = "null or missing error: string LIKE string ESCAPE null -> bool", - originalSql = "goodT LIKE goodT ESCAPE badT", - globals = mapOf( - "goodT" to STRING, - "badT" to NULL - ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 7, nAryOp = "LIKE")) - ) - ), - ) + - // logical `NOT` with non-bool - continuation type of bool - listOf( - TestCase( - name = "data type mismatch error: NOT string -> bool", - originalSql = "NOT badT", - globals = mapOf("badT" to STRING), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createDataTypeMismatchError(col = 1, argTypes = listOf(STRING), nAryOp = "NOT")) - ) - ), - TestCase( - name = "null or missing error: NOT null -> bool", - originalSql = "NOT nullT", - globals = mapOf("nullT" to NULL), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NOT")) - ) - ) - ) + - // `BETWEEN` op tests - continuation type of bool - listOf( - TestCase( - name = "data type mismatch error: int BETWEEN string AND string", - originalSql = "goodT BETWEEN badT AND badT", - globals = mapOf( - "goodT" to INT, - "badT" to STRING - ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createDataTypeMismatchError(col = 7, argTypes = listOf(INT, STRING, STRING), nAryOp = "BETWEEN")) - ) - ), - TestCase( - name = "null or missing error: null BETWEEN int AND int", - originalSql = "nullT BETWEEN goodT AND goodT", - globals = mapOf( - "nullT" to NULL, - "goodT" to INT - ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 7, nAryOp = "BETWEEN")) - ) - ) - ) + - // `IN` op tests - continuation type of bool - listOf( - TestCase( - name = "data type mismatch error: int IN int", - originalSql = "lhs IN rhs", - globals = mapOf( - "lhs" to INT, - "rhs" to INT - ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createDataTypeMismatchError(col = 5, argTypes = listOf(INT, INT), nAryOp = "IN")) - ) - ), - TestCase( - name = "data type mismatch error (incomparable rhs element type): int IN list(string)", - originalSql = "lhs IN rhs", - globals = mapOf( - "lhs" to INT, - "rhs" to ListType(elementType = STRING) - ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createDataTypeMismatchError(col = 5, argTypes = listOf(INT, ListType(STRING)), nAryOp = "IN")) - ) - ), - TestCase( - name = "null or missing error: null IN list(string)", - originalSql = "nullT IN rhs", - globals = mapOf( - "nullT" to NULL, - "rhs" to ListType(elementType = STRING) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = 
listOf(createReturnsNullOrMissingError(col = 7, nAryOp = "IN")) - ) - ), - TestCase( - name = "null or missing error: int IN null", - originalSql = "lhs IN nullT", - globals = mapOf( - "lhs" to INT, - "nullT" to NULL + ) + + // logical `NOT` with non-bool - continuation type of bool + listOf( + TestCase( + name = "data type mismatch error: NOT string -> bool", + originalSql = "NOT badT", + globals = mapOf("badT" to STRING), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createDataTypeMismatchError(col = 1, argTypes = listOf(STRING), nAryOp = "NOT")) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = BOOL, - expectedProblems = listOf(createReturnsNullOrMissingError(col = 5, nAryOp = "IN")) + TestCase( + name = "null or missing error: NOT null -> bool", + originalSql = "NOT nullT", + globals = mapOf("nullT" to NULL), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NOT")) + ) ) - ) - ) + - // `NULLIF` op tests - continuation type of left argument types and null - listOf( - TestCase( - name = "data type mismatch error: NULLIF(union(INT, FLOAT), STRING)", - originalSql = "NULLIF(lhs, rhs)", - globals = mapOf( - "lhs" to unionOf(INT, FLOAT), - "rhs" to STRING + ) + + // `BETWEEN` op tests - continuation type of bool + listOf( + TestCase( + name = "data type mismatch error: int BETWEEN string AND string", + originalSql = "goodT BETWEEN badT AND badT", + globals = mapOf( + "goodT" to INT, + "badT" to STRING + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createDataTypeMismatchError(col = 7, argTypes = listOf(INT, STRING, STRING), nAryOp = "BETWEEN")) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(INT, FLOAT, NULL), - expectedProblems = listOf(createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(INT, FLOAT), STRING), nAryOp = "NULLIF")) + TestCase( + name = "null or missing error: null BETWEEN int AND int", + originalSql = "nullT BETWEEN goodT AND goodT", + globals = mapOf( + "nullT" to NULL, + "goodT" to INT + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 7, nAryOp = "BETWEEN")) + ) ) - ), - TestCase( - name = "null or missing error: NULLIF(union(INT, FLOAT), MISSING)", - originalSql = "NULLIF(lhs, rhs)", - globals = mapOf( - "lhs" to unionOf(INT, FLOAT), - "rhs" to MISSING + ) + + // `IN` op tests - continuation type of bool + listOf( + TestCase( + name = "data type mismatch error: int IN int", + originalSql = "lhs IN rhs", + globals = mapOf( + "lhs" to INT, + "rhs" to INT + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createDataTypeMismatchError(col = 5, argTypes = listOf(INT, INT), nAryOp = "IN")) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(INT, FLOAT, NULL), - expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF")) - ) - ), - TestCase( - name = "null or missing error: NULLIF(MISSING, union(INT, FLOAT))", - originalSql = "NULLIF(lhs, rhs)", - globals = mapOf( - "lhs" to MISSING, - "rhs" to unionOf(INT, FLOAT) + TestCase( + name = "data type mismatch error (incomparable rhs element type): int IN list(string)", + originalSql = "lhs IN rhs", + globals = mapOf( + "lhs" to INT, + "rhs" to ListType(elementType = STRING) + ), + 
handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createDataTypeMismatchError(col = 5, argTypes = listOf(INT, ListType(STRING)), nAryOp = "IN")) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(MISSING, NULL), - expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF")) - ) - ) - ) + - // SimpleCaseWhen should include all `THEN` expression types in the case of error. If no `ELSE` branch is - // included, then will also include `NULL` in the output types - listOf( - TestCase( - name = "data type mismatch error: CASE WHEN THEN WHEN THEN END", - originalSql = "CASE t_int WHEN t_string THEN t_string WHEN t_symbol THEN t_symbol END", - globals = mapOf( - "t_int" to INT, - "t_string" to STRING, - "t_symbol" to SYMBOL + TestCase( + name = "null or missing error: null IN list(string)", + originalSql = "nullT IN rhs", + globals = mapOf( + "nullT" to NULL, + "rhs" to ListType(elementType = STRING) + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 7, nAryOp = "IN")) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(STRING, SYMBOL, NULL), - expectedProblems = listOf( - createDataTypeMismatchError(SourceLocationMeta(1L, 17L, 8L), argTypes = listOf(INT, STRING), nAryOp = "CASE"), - createDataTypeMismatchError(SourceLocationMeta(1L, 45L, 8L), argTypes = listOf(INT, SYMBOL), nAryOp = "CASE") + TestCase( + name = "null or missing error: int IN null", + originalSql = "lhs IN nullT", + globals = mapOf( + "lhs" to INT, + "nullT" to NULL + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = BOOL, + expectedProblems = listOf(createReturnsNullOrMissingError(col = 5, nAryOp = "IN")) ) ) - ), - TestCase( - name = "data type mismatch error with elseExpr: CASE WHEN THEN WHEN THEN ELSE t_float END", - originalSql = "CASE t_int WHEN t_string THEN t_string WHEN t_symbol THEN t_symbol ELSE t_float END", - globals = mapOf( - "t_int" to INT, - "t_string" to STRING, - "t_symbol" to SYMBOL, - "t_float" to FLOAT + ) + + // `NULLIF` op tests - continuation type of left argument types and null + listOf( + TestCase( + name = "data type mismatch error: NULLIF(union(INT, FLOAT), STRING)", + originalSql = "NULLIF(lhs, rhs)", + globals = mapOf( + "lhs" to unionOf(INT, FLOAT), + "rhs" to STRING + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(INT, FLOAT, NULL), + expectedProblems = listOf(createDataTypeMismatchError(col = 1, argTypes = listOf(unionOf(INT, FLOAT), STRING), nAryOp = "NULLIF")) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(STRING, SYMBOL, FLOAT), - expectedProblems = listOf( - createDataTypeMismatchError(SourceLocationMeta(1L, 17L, 8L), argTypes = listOf(INT, STRING), nAryOp = "CASE"), - createDataTypeMismatchError(SourceLocationMeta(1L, 45L, 8L), argTypes = listOf(INT, SYMBOL), nAryOp = "CASE") + TestCase( + name = "null or missing error: NULLIF(union(INT, FLOAT), MISSING)", + originalSql = "NULLIF(lhs, rhs)", + globals = mapOf( + "lhs" to unionOf(INT, FLOAT), + "rhs" to MISSING + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(INT, FLOAT, NULL), + expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF")) ) - ) - ), - TestCase( - name = "null or missing error (from caseValue): CASE WHEN THEN WHEN THEN END", - originalSql = "CASE t_missing WHEN t_string THEN t_string WHEN 
t_symbol THEN t_symbol END", - globals = mapOf( - "t_missing" to MISSING, - "t_string" to STRING, - "t_symbol" to SYMBOL ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(STRING, SYMBOL, NULL), - expectedProblems = listOf( - createReturnsNullOrMissingError(SourceLocationMeta(1L, 6L, 9L)) + TestCase( + name = "null or missing error: NULLIF(MISSING, union(INT, FLOAT))", + originalSql = "NULLIF(lhs, rhs)", + globals = mapOf( + "lhs" to MISSING, + "rhs" to unionOf(INT, FLOAT) + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(MISSING, NULL), + expectedProblems = listOf(createReturnsNullOrMissingError(col = 1, nAryOp = "NULLIF")) ) ) - ), - TestCase( - name = "data type mismatch and null or missing errors: CASE WHEN THEN WHEN THEN END", - originalSql = "CASE t_int WHEN t_missing THEN t_string WHEN t_symbol THEN t_symbol END", - globals = mapOf( - "t_int" to INT, - "t_missing" to MISSING, - "t_string" to STRING, - "t_symbol" to SYMBOL + ) + + // SimpleCaseWhen should include all `THEN` expression types in the case of error. If no `ELSE` branch is + // included, then will also include `NULL` in the output types + listOf( + TestCase( + name = "data type mismatch error: CASE WHEN THEN WHEN THEN END", + originalSql = "CASE t_int WHEN t_string THEN t_string WHEN t_symbol THEN t_symbol END", + globals = mapOf( + "t_int" to INT, + "t_string" to STRING, + "t_symbol" to SYMBOL + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(STRING, SYMBOL, NULL), + expectedProblems = listOf( + createDataTypeMismatchError(SourceLocationMeta(1L, 17L, 8L), argTypes = listOf(INT, STRING), nAryOp = "CASE"), + createDataTypeMismatchError(SourceLocationMeta(1L, 45L, 8L), argTypes = listOf(INT, SYMBOL), nAryOp = "CASE") + ) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(STRING, SYMBOL, NULL), - expectedProblems = listOf( - createReturnsNullOrMissingError(SourceLocationMeta(1L, 17L, 9L)), - createDataTypeMismatchError(SourceLocationMeta(1L, 46L, 8L), argTypes = listOf(INT, SYMBOL), nAryOp = "CASE") + TestCase( + name = "data type mismatch error with elseExpr: CASE WHEN THEN WHEN THEN ELSE t_float END", + originalSql = "CASE t_int WHEN t_string THEN t_string WHEN t_symbol THEN t_symbol ELSE t_float END", + globals = mapOf( + "t_int" to INT, + "t_string" to STRING, + "t_symbol" to SYMBOL, + "t_float" to FLOAT + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(STRING, SYMBOL, FLOAT), + expectedProblems = listOf( + createDataTypeMismatchError(SourceLocationMeta(1L, 17L, 8L), argTypes = listOf(INT, STRING), nAryOp = "CASE"), + createDataTypeMismatchError(SourceLocationMeta(1L, 45L, 8L), argTypes = listOf(INT, SYMBOL), nAryOp = "CASE") + ) ) - ) - ) - ) + - // SearchedCaseWhen should include all `THEN` expression types in the case of error. 
If no `ELSE` branch is - // included, then will also include `NULL` in the output types - listOf( - TestCase( - name = "data type mismatch error: CASE WHEN THEN WHEN THEN END", - originalSql = "CASE WHEN t_int THEN t_int WHEN t_string THEN t_string END", - globals = mapOf( - "t_int" to INT, - "t_string" to STRING ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(INT, STRING, NULL), - expectedProblems = listOf( - createIncompatibleTypesForExprError(SourceLocationMeta(1L, 11L, 5L), expectedType = BOOL, actualType = INT), - createIncompatibleTypesForExprError(SourceLocationMeta(1L, 33L, 8L), expectedType = BOOL, actualType = STRING) + TestCase( + name = "null or missing error (from caseValue): CASE WHEN THEN WHEN THEN END", + originalSql = "CASE t_missing WHEN t_string THEN t_string WHEN t_symbol THEN t_symbol END", + globals = mapOf( + "t_missing" to MISSING, + "t_string" to STRING, + "t_symbol" to SYMBOL + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(STRING, SYMBOL, NULL), + expectedProblems = listOf( + createReturnsNullOrMissingError(SourceLocationMeta(1L, 6L, 9L)) + ) ) - ) - ), - TestCase( - name = "data type mismatch error with elseExpr: CASE WHEN THEN WHEN THEN ELSE END", - originalSql = "CASE WHEN t_int THEN t_int WHEN t_string THEN t_string ELSE t_symbol END", - globals = mapOf( - "t_int" to INT, - "t_string" to STRING, - "t_symbol" to SYMBOL ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(INT, STRING, SYMBOL), - expectedProblems = listOf( - createIncompatibleTypesForExprError(SourceLocationMeta(1L, 11L, 5L), expectedType = BOOL, actualType = INT), - createIncompatibleTypesForExprError(SourceLocationMeta(1L, 33L, 8L), expectedType = BOOL, actualType = STRING) + TestCase( + name = "data type mismatch and null or missing errors: CASE WHEN THEN WHEN THEN END", + originalSql = "CASE t_int WHEN t_missing THEN t_string WHEN t_symbol THEN t_symbol END", + globals = mapOf( + "t_int" to INT, + "t_missing" to MISSING, + "t_string" to STRING, + "t_symbol" to SYMBOL + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(STRING, SYMBOL, NULL), + expectedProblems = listOf( + createReturnsNullOrMissingError(SourceLocationMeta(1L, 17L, 9L)), + createDataTypeMismatchError(SourceLocationMeta(1L, 46L, 8L), argTypes = listOf(INT, SYMBOL), nAryOp = "CASE") + ) ) ) - ), - TestCase( - name = "null or missing error: CASE WHEN THEN WHEN THEN END", - originalSql = "CASE WHEN t_null THEN t_null WHEN t_missing THEN t_missing END", - globals = mapOf( - "t_null" to NULL, - "t_missing" to MISSING + ) + + // SearchedCaseWhen should include all `THEN` expression types in the case of error. 
If no `ELSE` branch is + // included, then will also include `NULL` in the output types + listOf( + TestCase( + name = "data type mismatch error: CASE WHEN THEN WHEN THEN END", + originalSql = "CASE WHEN t_int THEN t_int WHEN t_string THEN t_string END", + globals = mapOf( + "t_int" to INT, + "t_string" to STRING + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(INT, STRING, NULL), + expectedProblems = listOf( + createIncompatibleTypesForExprError(SourceLocationMeta(1L, 11L, 5L), expectedType = BOOL, actualType = INT), + createIncompatibleTypesForExprError(SourceLocationMeta(1L, 33L, 8L), expectedType = BOOL, actualType = STRING) + ) + ) + ), + TestCase( + name = "data type mismatch error with elseExpr: CASE WHEN THEN WHEN THEN ELSE END", + originalSql = "CASE WHEN t_int THEN t_int WHEN t_string THEN t_string ELSE t_symbol END", + globals = mapOf( + "t_int" to INT, + "t_string" to STRING, + "t_symbol" to SYMBOL + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(INT, STRING, SYMBOL), + expectedProblems = listOf( + createIncompatibleTypesForExprError(SourceLocationMeta(1L, 11L, 5L), expectedType = BOOL, actualType = INT), + createIncompatibleTypesForExprError(SourceLocationMeta(1L, 33L, 8L), expectedType = BOOL, actualType = STRING) + ) + ) ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(NULL, MISSING), - expectedProblems = listOf( - createReturnsNullOrMissingError(SourceLocationMeta(1L, 11L, 6L)), - createReturnsNullOrMissingError(SourceLocationMeta(1L, 35L, 9L)) + TestCase( + name = "null or missing error: CASE WHEN THEN WHEN THEN END", + originalSql = "CASE WHEN t_null THEN t_null WHEN t_missing THEN t_missing END", + globals = mapOf( + "t_null" to NULL, + "t_missing" to MISSING + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(NULL, MISSING), + expectedProblems = listOf( + createReturnsNullOrMissingError(SourceLocationMeta(1L, 11L, 6L)), + createReturnsNullOrMissingError(SourceLocationMeta(1L, 35L, 9L)) + ) ) - ) - ), - TestCase( - name = "data type mismatch and null or missing errors: whenExprs of non-bools and unknown", - originalSql = "CASE WHEN t_int THEN t_int WHEN t_string THEN t_string WHEN t_missing THEN t_missing END", - globals = mapOf( - "t_int" to INT, - "t_string" to STRING, - "t_missing" to MISSING ), - handler = expectQueryOutputTypeAndProblems( - expectedType = unionOf(INT, MISSING, STRING, NULL), - expectedProblems = listOf( - createIncompatibleTypesForExprError(SourceLocationMeta(1L, 11L, 5L), expectedType = BOOL, actualType = INT), - createIncompatibleTypesForExprError(SourceLocationMeta(1L, 33L, 8L), expectedType = BOOL, actualType = STRING), - createReturnsNullOrMissingError(SourceLocationMeta(1L, 61L, 9L)) + TestCase( + name = "data type mismatch and null or missing errors: whenExprs of non-bools and unknown", + originalSql = "CASE WHEN t_int THEN t_int WHEN t_string THEN t_string WHEN t_missing THEN t_missing END", + globals = mapOf( + "t_int" to INT, + "t_string" to STRING, + "t_missing" to MISSING + ), + handler = expectQueryOutputTypeAndProblems( + expectedType = unionOf(INT, MISSING, STRING, NULL), + expectedProblems = listOf( + createIncompatibleTypesForExprError(SourceLocationMeta(1L, 11L, 5L), expectedType = BOOL, actualType = INT), + createIncompatibleTypesForExprError(SourceLocationMeta(1L, 33L, 8L), expectedType = BOOL, actualType = STRING), + createReturnsNullOrMissingError(SourceLocationMeta(1L, 61L, 9L)) + ) ) ) - ) - ) + - // function calls with 
invalid arguments leading to errors have a continuation type of the function - // signature's return type - listOf( - TestCase( - name = "invalid function call arg: UPPER(INT) -> STRING", - originalSql = "UPPER(x)", - globals = mapOf("x" to INT), - handler = expectQueryOutputTypeAndProblems( - expectedType = STRING, - expectedProblems = listOf( - createInvalidArgumentTypeForFunctionError( - sourceLocation = SourceLocationMeta(1L, 7L, 1L), - functionName = "upper", - expectedArgType = unionOf(STRING, SYMBOL), - actualType = INT + ) + + // function calls with invalid arguments leading to errors have a continuation type of the function + // signature's return type + listOf( + TestCase( + name = "invalid function call arg: UPPER(INT) -> STRING", + originalSql = "UPPER(x)", + globals = mapOf("x" to INT), + handler = expectQueryOutputTypeAndProblems( + expectedType = STRING, + expectedProblems = listOf( + createInvalidArgumentTypeForFunctionError( + sourceLocation = SourceLocationMeta(1L, 7L, 1L), + functionName = "upper", + expectedArgType = unionOf(STRING, SYMBOL), + actualType = INT + ) ) ) - ) - ), - TestCase( - name = "null function call arg: UPPER(NULL) -> STRING", - originalSql = "UPPER(x)", - globals = mapOf("x" to NULL), - handler = expectQueryOutputTypeAndProblems( - expectedType = STRING, - expectedProblems = listOf( - createNullOrMissingFunctionArgumentError( - sourceLocation = SourceLocationMeta(1L, 7L, 1L), - functionName = "upper" + ), + TestCase( + name = "null function call arg: UPPER(NULL) -> STRING", + originalSql = "UPPER(x)", + globals = mapOf("x" to NULL), + handler = expectQueryOutputTypeAndProblems( + expectedType = STRING, + expectedProblems = listOf( + createNullOrMissingFunctionArgumentError( + sourceLocation = SourceLocationMeta(1L, 7L, 1L), + functionName = "upper" + ) ) ) - ) - ), - TestCase( - name = "invalid function call arg and null in optional: SUBSTRING(STRING, NULL, BOOL) -> STRING", - originalSql = "SUBSTRING('123456789', x, y)", - globals = mapOf("x" to BOOL, "y" to NULL), - handler = expectQueryOutputTypeAndProblems( - expectedType = STRING, - expectedProblems = listOf( - createInvalidArgumentTypeForFunctionError( - sourceLocation = SourceLocationMeta(1L, 24L, 1L), - functionName = "substring", - expectedArgType = INT, - actualType = BOOL - ), - createNullOrMissingFunctionArgumentError( - sourceLocation = SourceLocationMeta(1L, 27L, 1L), - functionName = "substring" + ), + TestCase( + name = "invalid function call arg and null in optional: SUBSTRING(STRING, NULL, BOOL) -> STRING", + originalSql = "SUBSTRING('123456789', x, y)", + globals = mapOf("x" to BOOL, "y" to NULL), + handler = expectQueryOutputTypeAndProblems( + expectedType = STRING, + expectedProblems = listOf( + createInvalidArgumentTypeForFunctionError( + sourceLocation = SourceLocationMeta(1L, 24L, 1L), + functionName = "substring", + expectedArgType = INT, + actualType = BOOL + ), + createNullOrMissingFunctionArgumentError( + sourceLocation = SourceLocationMeta(1L, 27L, 1L), + functionName = "substring" + ) ) ) - ) - ), - TestCase( - name = "invalid function call arg in variadic arg and missing: TRIM(BOTH INT FROM MISSING)", - originalSql = "TRIM(BOTH x FROM y)", - globals = mapOf("x" to INT, "y" to MISSING), - handler = expectQueryOutputTypeAndProblems( - expectedType = STRING, - expectedProblems = listOf( - createInvalidArgumentTypeForFunctionError( - sourceLocation = SourceLocationMeta(1L, 11L, 1L), - functionName = "trim", - expectedArgType = STRING, - actualType = INT - ), - 
createNullOrMissingFunctionArgumentError( - sourceLocation = SourceLocationMeta(1L, 18L, 1L), - functionName = "trim" + ), + TestCase( + name = "invalid function call arg in variadic arg and missing: TRIM(BOTH INT FROM MISSING)", + originalSql = "TRIM(BOTH x FROM y)", + globals = mapOf("x" to INT, "y" to MISSING), + handler = expectQueryOutputTypeAndProblems( + expectedType = STRING, + expectedProblems = listOf( + createInvalidArgumentTypeForFunctionError( + sourceLocation = SourceLocationMeta(1L, 11L, 1L), + functionName = "trim", + expectedArgType = STRING, + actualType = INT + ), + createNullOrMissingFunctionArgumentError( + sourceLocation = SourceLocationMeta(1L, 18L, 1L), + functionName = "trim" + ) ) ) ) - ) - ) + - // operations that can be chained (i.e. left-associative, binary operation) with a data type mismatch - // should not lead to multiple errors - OpType.ARITHMETIC.operators.flatMap { arithmeticOp -> - createChainedOpSingleErrorTests( - goodType = INT, - badType = STRING, - op = arithmeticOp - ) - } + - createChainedOpSingleErrorTests( - goodType = STRING, - badType = INT, - op = "||" - ) + - OpType.LOGICAL.operators.flatMap { logicalOp -> + ) + + // operations that can be chained (i.e. left-associative, binary operation) with a data type mismatch + // should not lead to multiple errors + OpType.ARITHMETIC.operators.flatMap { arithmeticOp -> + createChainedOpSingleErrorTests( + goodType = INT, + badType = STRING, + op = arithmeticOp + ) + } + createChainedOpSingleErrorTests( - goodType = BOOL, - badType = STRING, - op = logicalOp - ) - } + goodType = STRING, + badType = INT, + op = "||" + ) + + OpType.LOGICAL.operators.flatMap { logicalOp -> + createChainedOpSingleErrorTests( + goodType = BOOL, + badType = STRING, + op = logicalOp + ) + } private val JOIN_WITH_PREDICATE = listOf("JOIN", "INNER JOIN", "LEFT JOIN", "RIGHT JOIN") @@ -5462,74 +5610,74 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { // `JOIN` predicates with valid types containing `BOOL`. These tests are meant to just test the `JOIN` // predicate inference behavior. 
createJoinPredicateTypeValidTests(predicateType = BOOL) + - createJoinPredicateTypeValidTests(predicateType = BOOL.asNullable()) + - createJoinPredicateTypeValidTests(predicateType = BOOL.asOptional()) + - createJoinPredicateTypeValidTests(predicateType = BOOL.asNullable().asOptional()) + - createJoinPredicateTypeValidTests(predicateType = unionOf(BOOL, INT, STRING)) + - // - // `JOIN` predicates with invalid types below - // - // incompatible types for predicate expression -> incompatible types for expression error - ALL_NON_BOOL_NON_UNKNOWN_TYPES.flatMap { nonBoolType -> + createJoinPredicateTypeValidTests(predicateType = BOOL.asNullable()) + + createJoinPredicateTypeValidTests(predicateType = BOOL.asOptional()) + + createJoinPredicateTypeValidTests(predicateType = BOOL.asNullable().asOptional()) + + createJoinPredicateTypeValidTests(predicateType = unionOf(BOOL, INT, STRING)) + + // + // `JOIN` predicates with invalid types below + // + // incompatible types for predicate expression -> incompatible types for expression error + ALL_NON_BOOL_NON_UNKNOWN_TYPES.flatMap { nonBoolType -> + createJoinPredicateContinuationTypeTests( + predicateType = nonBoolType, + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(4L, 28L, 1L), + expectedType = BOOL, + actualType = nonBoolType + ) + ) + ) + } + + // unknown types for predicate expression -> null or missing error + ALL_UNKNOWN_TYPES.flatMap { unknownType -> + createJoinPredicateContinuationTypeTests( + predicateType = unknownType, + expectedProblems = listOf(createReturnsNullOrMissingError(SourceLocationMeta(4L, 28L, 1L))) + ) + } + + // other predicate types resulting in an error createJoinPredicateContinuationTypeTests( - predicateType = nonBoolType, + predicateType = INT.asNullable(), expectedProblems = listOf( createIncompatibleTypesForExprError( SourceLocationMeta(4L, 28L, 1L), expectedType = BOOL, - actualType = nonBoolType + actualType = INT.asNullable() ) ) - ) - } + - // unknown types for predicate expression -> null or missing error - ALL_UNKNOWN_TYPES.flatMap { unknownType -> + ) + createJoinPredicateContinuationTypeTests( - predicateType = unknownType, - expectedProblems = listOf(createReturnsNullOrMissingError(SourceLocationMeta(4L, 28L, 1L))) - ) - } + - // other predicate types resulting in an error - createJoinPredicateContinuationTypeTests( - predicateType = INT.asNullable(), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(4L, 28L, 1L), - expectedType = BOOL, - actualType = INT.asNullable() - ) - ) - ) + - createJoinPredicateContinuationTypeTests( - predicateType = INT.asOptional(), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(4L, 28L, 1L), - expectedType = BOOL, - actualType = INT.asOptional() + predicateType = INT.asOptional(), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(4L, 28L, 1L), + expectedType = BOOL, + actualType = INT.asOptional() + ) ) - ) - ) + - createJoinPredicateContinuationTypeTests( - predicateType = INT.asNullable().asOptional(), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(4L, 28L, 1L), - expectedType = BOOL, - actualType = INT.asNullable().asOptional() + ) + + createJoinPredicateContinuationTypeTests( + predicateType = INT.asNullable().asOptional(), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(4L, 28L, 1L), + expectedType = BOOL, + actualType = 
INT.asNullable().asOptional() + ) ) - ) - ) + - createJoinPredicateContinuationTypeTests( - predicateType = unionOf(INT, FLOAT, STRING), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(4L, 28L, 1L), - expectedType = BOOL, - actualType = unionOf(INT, FLOAT, STRING) + ) + + createJoinPredicateContinuationTypeTests( + predicateType = unionOf(INT, FLOAT, STRING), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(4L, 28L, 1L), + expectedType = BOOL, + actualType = unionOf(INT, FLOAT, STRING) + ) ) ) - ) /** * Creates a simple SFW query with a where expression of type [whereType]. Verifies that no errors are @@ -5578,72 +5726,72 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { createSelectWhereTypeValidTests(whereType = BOOL.asNullable().asOptional()), createSelectWhereTypeValidTests(whereType = unionOf(BOOL, INT, STRING)) ) + - // - // `WHERE` expressions with invalid types below - // - // incompatible types for where expression -> incompatible types for expression error - ALL_NON_BOOL_NON_UNKNOWN_TYPES.map { nonBoolType -> - createSelectWhereContinuationTypeTests( - whereType = nonBoolType, - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(1L, 23L, 9L), - expectedType = BOOL, - actualType = nonBoolType + // + // `WHERE` expressions with invalid types below + // + // incompatible types for where expression -> incompatible types for expression error + ALL_NON_BOOL_NON_UNKNOWN_TYPES.map { nonBoolType -> + createSelectWhereContinuationTypeTests( + whereType = nonBoolType, + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(1L, 23L, 9L), + expectedType = BOOL, + actualType = nonBoolType + ) ) ) - ) - } + - // unknown types for where expression -> null or missing error - ALL_UNKNOWN_TYPES.map { unknownType -> - createSelectWhereContinuationTypeTests( - whereType = unknownType, - expectedProblems = listOf(createReturnsNullOrMissingError(SourceLocationMeta(1L, 23L, 9L))) - ) - } + - listOf( - // other where expression types resulting in an error - createSelectWhereContinuationTypeTests( - whereType = INT.asNullable(), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(1L, 23L, 9L), - expectedType = BOOL, - actualType = INT.asNullable() - ) + } + + // unknown types for where expression -> null or missing error + ALL_UNKNOWN_TYPES.map { unknownType -> + createSelectWhereContinuationTypeTests( + whereType = unknownType, + expectedProblems = listOf(createReturnsNullOrMissingError(SourceLocationMeta(1L, 23L, 9L))) ) - ), - createSelectWhereContinuationTypeTests( - whereType = INT.asOptional(), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(1L, 23L, 9L), - expectedType = BOOL, - actualType = INT.asOptional() + } + + listOf( + // other where expression types resulting in an error + createSelectWhereContinuationTypeTests( + whereType = INT.asNullable(), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(1L, 23L, 9L), + expectedType = BOOL, + actualType = INT.asNullable() + ) ) - ) - ), - createSelectWhereContinuationTypeTests( - whereType = INT.asNullable().asOptional(), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(1L, 23L, 9L), - expectedType = BOOL, - actualType = INT.asNullable().asOptional() + ), + createSelectWhereContinuationTypeTests( + whereType = 
INT.asOptional(), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(1L, 23L, 9L), + expectedType = BOOL, + actualType = INT.asOptional() + ) ) - ) - ), - createSelectWhereContinuationTypeTests( - whereType = unionOf(INT, FLOAT, STRING), - expectedProblems = listOf( - createIncompatibleTypesForExprError( - SourceLocationMeta(1L, 23L, 9L), - expectedType = BOOL, - actualType = unionOf(INT, FLOAT, STRING) + ), + createSelectWhereContinuationTypeTests( + whereType = INT.asNullable().asOptional(), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(1L, 23L, 9L), + expectedType = BOOL, + actualType = INT.asNullable().asOptional() + ) + ) + ), + createSelectWhereContinuationTypeTests( + whereType = unionOf(INT, FLOAT, STRING), + expectedProblems = listOf( + createIncompatibleTypesForExprError( + SourceLocationMeta(1L, 23L, 9L), + expectedType = BOOL, + actualType = unionOf(INT, FLOAT, STRING) + ) ) ) ) - ) @JvmStatic @Suppress("unused") @@ -5652,7 +5800,7 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { TestCase( name = "SimpleCaseWhen error in WHEN expression", originalSql = - """ + """ CASE t_int WHEN t_string || t_string || t_string THEN t_string WHEN t_symbol || t_symbol || t_symbol THEN t_symbol @@ -5673,7 +5821,7 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { TestCase( name = "SearchedCaseWhen error in WHEN expression", originalSql = - """ + """ CASE WHEN t_string || t_string || t_string THEN t_string WHEN t_symbol || t_symbol || t_symbol THEN t_symbol @@ -6382,7 +6530,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { ), handler = expectSemanticErrors( expectedErrors = listOf( - Problem(SourceLocationMeta(1L, 26L), + Problem( + SourceLocationMeta(1L, 26L), SemanticProblemDetails.DuplicateAliasesInSelectListItem ) ) @@ -6529,13 +6678,13 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { // Evaluator behavior when this test case was written - // PartiQL> select f, x from sexp(1,2,3) as f at x // | - //===' - //<< + // ===' + // << // { // 'f': `(1 2 3)` // } - //>> - //--- + // >> + // --- "elem" to SexpType(StaticType.INT), "atVal" to StaticType.MISSING ), @@ -6873,7 +7022,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "UPPER()", handler = expectSemanticErrors( expectedErrors = listOf( - Problem(SourceLocationMeta(1L, 1L), + Problem( + SourceLocationMeta(1L, 1L), SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( functionName = "upper", expectedArity = 1..1, @@ -6888,7 +7038,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "UPPER('test', 'test')", handler = expectSemanticErrors( expectedErrors = listOf( - Problem(SourceLocationMeta(1L, 1L), + Problem( + SourceLocationMeta(1L, 1L), SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( functionName = "upper", expectedArity = 1..1, @@ -6917,7 +7068,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "utcnow(null)", handler = expectSemanticErrors( expectedErrors = listOf( - Problem(SourceLocationMeta(1L, 1L), + Problem( + SourceLocationMeta(1L, 1L), SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( functionName = "utcnow", expectedArity = 0..0, @@ -7021,7 +7173,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "TO_TIMESTAMP('February 2016', 'MMMM yyyy', 'extra arg')", handler = 
expectSemanticErrors( expectedErrors = listOf( - Problem(SourceLocationMeta(1L, 1L), + Problem( + SourceLocationMeta(1L, 1L), SemanticProblemDetails.IncorrectNumberOfArgumentsToFunctionCall( functionName = "to_timestamp", expectedArity = 1..2, @@ -7042,7 +7195,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "non_existent(null)", handler = expectSemanticErrors( expectedErrors = listOf( - Problem(SourceLocationMeta(1L, 1L), + Problem( + SourceLocationMeta(1L, 1L), SemanticProblemDetails.NoSuchFunction( functionName = "non_existent" ) @@ -7067,33 +7221,42 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { TestCase( "Array index with numeric literal ", "SELECT a.l[1] AS x FROM a", - mapOf("a" to StructType( - mapOf("l" to ListType(elementType = StaticType.BOOL)) - )), - handler= expectQueryOutputType( + mapOf( + "a" to StructType( + mapOf("l" to ListType(elementType = StaticType.BOOL)) + ) + ), + handler = expectQueryOutputType( BagType(StructType(mapOf("x" to StaticType.BOOL), contentClosed = true)) ) ), TestCase( "Array index with call to operator ", "SELECT a.l[1 + 1] AS x FROM a", - mapOf("a" to StructType( - mapOf("l" to ListType(elementType = StaticType.BOOL)) - )), - handler= expectQueryOutputType( + mapOf( + "a" to StructType( + mapOf("l" to ListType(elementType = StaticType.BOOL)) + ) + ), + handler = expectQueryOutputType( BagType(StructType(mapOf("x" to StaticType.BOOL), contentClosed = true)) ) ), TestCase( "Struct index with call to operator ", "SELECT a.l[1 + 1] AS x, a.l.y AS p FROM a", - mapOf("a" to StructType( - mapOf("l" to StructType(mapOf("y" to StaticType.BOOL))) - )), - handler= expectQueryOutputType( - BagType(StructType( - mapOf("x" to StaticType.MISSING, "p" to StaticType.BOOL), - contentClosed = true)) + mapOf( + "a" to StructType( + mapOf("l" to StructType(mapOf("y" to StaticType.BOOL))) + ) + ), + handler = expectQueryOutputType( + BagType( + StructType( + mapOf("x" to StaticType.MISSING, "p" to StaticType.BOOL), + contentClosed = true + ) + ) ) ), TestCase( @@ -7101,25 +7264,31 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { "SELECT a.b.c.d AS x FROM a", mapOf( "a" to StructType( - mapOf("b" to StructType( - mapOf( - "c" to StructType( - mapOf( - "d" to StaticType.BOOL, - "e" to StaticType.DECIMAL - )), - "xx" to StaticType.BLOB)), - "ww" to StaticType.CLOB))), - handler= expectQueryOutputType( + mapOf( + "b" to StructType( + mapOf( + "c" to StructType( + mapOf( + "d" to StaticType.BOOL, + "e" to StaticType.DECIMAL + ) + ), + "xx" to StaticType.BLOB + ) + ), + "ww" to StaticType.CLOB + ) + ) + ), + handler = expectQueryOutputType( BagType(StructType(mapOf("x" to StaticType.BOOL), contentClosed = true)) ) ) ) - @JvmStatic @Suppress("unused") - fun parametersForSimplePathsOnStructs() : List { + fun parametersForSimplePathsOnStructs(): List { val VALID_PATH_EXPR_SOURCES = setOf(StaticType.ANY, StaticType.LIST, StaticType.SEXP, StaticType.STRUCT) val incompatibleTypeForB = @@ -7132,7 +7301,8 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { mapOf( "a" to StructType( mapOf("b" to type, "c" to StaticType.INT) - )), + ) + ), handler = expectQueryOutputType(StaticType.MISSING) ), TestCase( @@ -7141,50 +7311,58 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { mapOf( "a" to StructType( mapOf("b" to type, "c" to StaticType.INT) - )), + ) + ), handler = expectQueryOutputType(StaticType.MISSING) ) ) } + val bHasAnyType = 
StaticType.ALL_TYPES.flatMap { + listOf( + TestCase( + "Simple path on struct: a.b.c", + "a.b.c", + mapOf( + "a" to StructType( + mapOf("b" to ANY, "c" to it) + ) + ), + handler = expectQueryOutputType(StaticType.ANY) + ), + TestCase( + "Simple path on struct: a['b'].c", + "a['b'].c", + mapOf( + "a" to StructType( + mapOf("b" to ANY, "c" to it) + ) + ), + handler = expectQueryOutputType(StaticType.ANY) + ) + ) + } - val bHasAnyType = StaticType.ALL_TYPES.flatMap { - listOf( - TestCase( "Simple path on struct: a.b.c", - "a.b.c", - mapOf( - "a" to StructType( - mapOf("b" to ANY, "c" to it) - )), - handler = expectQueryOutputType(StaticType.ANY) - ), - TestCase( "Simple path on struct: a['b'].c", - "a['b'].c", - mapOf( - "a" to StructType( - mapOf("b" to ANY, "c" to it) - )), - handler = expectQueryOutputType(StaticType.ANY) - ) - ) - } - - val validTypeForB = StaticType.ALL_TYPES.flatMap { + val validTypeForB = StaticType.ALL_TYPES.flatMap { listOf( - TestCase( "Simple path on struct: a.b", + TestCase( + "Simple path on struct: a.b", "a.b", mapOf( "a" to StructType( mapOf("b" to it) - )), + ) + ), handler = expectQueryOutputType(it) ), - TestCase( "Simple path on struct: a['b']", + TestCase( + "Simple path on struct: a['b']", "a['b']", mapOf( "a" to StructType( mapOf("b" to it) - )), + ) + ), handler = expectQueryOutputType(it) ) ) @@ -7196,13 +7374,14 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { @Suppress("unused") fun parametersForSimplePathsOnSequences(): List { val INT_TYPES = setOf(StaticType.INT, StaticType.INT2, StaticType.INT4, StaticType.INT8) - val incompatibleTypeForIndex = StaticType.ALL_TYPES.filter {it !in INT_TYPES}.map { + val incompatibleTypeForIndex = StaticType.ALL_TYPES.filter { it !in INT_TYPES }.map { TestCase( "simple path for lists a[b] -- b is not INT type", "a[b]", mapOf( - "a" to ListType ( elementType = StaticType.STRING), - "b" to it), + "a" to ListType(elementType = StaticType.STRING), + "b" to it + ), handler = expectQueryOutputType(StaticType.MISSING) ) } @@ -7230,8 +7409,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { is ResolveTestResult.Value -> { assertEquals("Failed assertion for \"${result.testCase.name}\"", expectedType, result.staticType) val actualWarnings = result.problems // these should all be warnings - assertEquals("Expected ${expectedWarnings.size} warnings but received ${actualWarnings.size} warnings", - expectedWarnings.size, actualWarnings.size) + assertEquals( + "Expected ${expectedWarnings.size} warnings but received ${actualWarnings.size} warnings", + expectedWarnings.size, actualWarnings.size + ) assertEquals(expectedWarnings.toSet(), actualWarnings.toSet()) } }.let { } @@ -7242,8 +7423,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { is ResolveTestResult.Value -> fail("Expected failure for \"${result.testCase.name}\" but got $result") is ResolveTestResult.Failure -> { val actualErrors = result.problems.filter { it.details.severity == ProblemSeverity.ERROR } - assertEquals("Expected ${expectedErrors.size} errors but received ${actualErrors.size} errors", - expectedErrors.size, actualErrors.size) + assertEquals( + "Expected ${expectedErrors.size} errors but received ${actualErrors.size} errors", + expectedErrors.size, actualErrors.size + ) assertEquals(expectedErrors.toSet(), actualErrors.toSet()) } }.let { } @@ -7255,8 +7438,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { is ResolveTestResult.Failure 
-> { assertEquals("Failed assertion for \"${result.testCase.name}\"", expectedType, result.staticType) val actualProblems = result.problems - assertEquals("Expected ${expectedProblems.size} problems but received ${actualProblems.size} problems", - expectedProblems.size, actualProblems.size) + assertEquals( + "Expected ${expectedProblems.size} problems but received ${actualProblems.size} problems", + expectedProblems.size, actualProblems.size + ) assertEquals(expectedProblems.toSet(), actualProblems.toSet()) } }.let { } @@ -7270,8 +7455,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { is ResolveTestResult.Value -> fail("Expected failure for \"${result.testCase.name}\" but got $result") is ResolveTestResult.Failure -> { val actualProblems = result.problems - assertEquals("Expected ${expectedProblems.size} problems but received ${actualProblems.size} problems", - expectedProblems.size, actualProblems.size) + assertEquals( + "Expected ${expectedProblems.size} problems but received ${actualProblems.size} problems", + expectedProblems.size, actualProblems.size + ) assertEquals(expectedProblems.toSet(), actualProblems.toSet()) // additional assertions using the annotated [PartiqlAst.Statement] @@ -7282,8 +7469,10 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { private fun expectSemanticProblems(expectedProblems: List): (ResolveTestResult) -> Unit = { result: ResolveTestResult -> val actualProblems = result.problems - assertEquals("Expected ${expectedProblems.size} problems but received ${actualProblems.size} problems", - expectedProblems.size, actualProblems.size) + assertEquals( + "Expected ${expectedProblems.size} problems but received ${actualProblems.size} problems", + expectedProblems.size, actualProblems.size + ) assertEquals(expectedProblems.toSet(), actualProblems.toSet()) } @@ -7337,14 +7526,14 @@ class StaticTypeInferenceVisitorTransformTest : VisitorTransformTestBase() { data class Failure(val testCase: TestCase, val staticType: StaticType, val partiqlAst: PartiqlAst.Statement, override val problems: List) : ResolveTestResult() } - private val formatFunc = object : ExprFunction { override val signature = FunctionSignature( name = "format", requiredParameters = listOf(StaticType.STRING), variadicParameter = VarargFormalParameter(StaticType.ANY, 0), - returnType = StaticType.STRING) + returnType = StaticType.STRING + ) } } } diff --git a/lang/test/org/partiql/lang/eval/visitors/StaticTypeVisitorTransformTests.kt b/lang/test/org/partiql/lang/eval/visitors/StaticTypeVisitorTransformTests.kt index 328574befc..21c5a9591c 100644 --- a/lang/test/org/partiql/lang/eval/visitors/StaticTypeVisitorTransformTests.kt +++ b/lang/test/org/partiql/lang/eval/visitors/StaticTypeVisitorTransformTests.kt @@ -31,11 +31,13 @@ import java.io.StringWriter class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { - data class STRTestCase(val originalSql: String, - val globals: Map, - val handler: (ResolveTestResult) -> Unit, - val constraints: Set = setOf(), - val expectedAst: String? = null) { + data class STRTestCase( + val originalSql: String, + val globals: Map, + val handler: (ResolveTestResult) -> Unit, + val constraints: Set = setOf(), + val expectedAst: String? 
= null + ) { override fun toString(): String = "originalSql=$originalSql, globals=$globals, constraints=$constraints, expectedSql=$expectedAst" @@ -59,12 +61,11 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { @Parameters fun sfwTest(tc: STRTestCase) = runSTRTest(tc) - // In the test cases below there exists comments consisting of a bunch of numbers. They can be // used to quickly determine the column number of the text immediately beneath it. For example, // it's easy to see the token "fiftyFive" starts at character 55: // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 // fiftyFive // The first line is the 10's place of the column number, while the second line is the 1's place. // This helps to speed up the finding of the column number when it is used as part of the @@ -79,7 +80,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT x FROM b AS x", mapOf("b" to StaticType.BAG), expectVariableReferences( @@ -89,7 +90,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b", mapOf( "B" to StaticType.LIST, @@ -104,7 +105,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT COUNT(*) FROM b", mapOf("B" to StaticType.BAG), expectVariableReferences( @@ -113,7 +114,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT y FROM b AS x", mapOf("B" to StaticType.BAG), expectSubNode( @@ -121,19 +122,24 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { path( id( "x", - partiqlAstCaseSensitive, - partiqlAstLocalsFirst, - StaticType.ANY.toMetas() + metas(1, 8)), - listOf(pathExpr( - lit(ion.newString("y").toIonElement(), metas(1, 8)), - partiqlAstCaseInSensitive)), - metas(1, 8)) + partiqlAstCaseSensitive, + partiqlAstLocalsFirst, + StaticType.ANY.toMetas() + metas(1, 8) + ), + listOf( + pathExpr( + lit(ion.newString("y").toIonElement(), metas(1, 8)), + partiqlAstCaseInSensitive + ) + ), + metas(1, 8) + ) } ) ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT y.z FROM b AS x", mapOf("B" to StaticType.BAG), expectSubNode( @@ -143,7 +149,8 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { "x", partiqlAstCaseSensitive, partiqlAstLocalsFirst, - StaticType.ANY.toMetas() + metas(1, 8)), + StaticType.ANY.toMetas() + metas(1, 8) + ), listOf( pathExpr( lit(ion.newString("y").toIonElement(), metas(1, 8)), @@ -152,7 +159,8 @@ class 
StaticTypeVisitorTransformTests : VisitorTransformTestBase() { pathExpr( lit(ion.newString("z").toIonElement(), metas(1, 10)), partiqlAstCaseInSensitive - )), + ) + ), emptyMetaContainer() ) } @@ -160,7 +168,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT x FROM b AT x", mapOf("b" to StaticType.BAG), expectVariableReferences( @@ -170,7 +178,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT y FROM b AS x, x AS whatever AT y", mapOf("b" to StaticType.BAG), expectVariableReferences( @@ -181,7 +189,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT x FROM b LET 1 AS x", mapOf("b" to StaticType.BAG), expectVariableReferences( @@ -192,7 +200,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // multiple unique bindings STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT x, y FROM b LET char_length(b.name) AS x, x + 1 AS y", mapOf("b" to StaticType.BAG), expectVariableReferences( @@ -204,7 +212,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // shadow binding from global variable STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT b FROM b LET 1 AS b", mapOf("b" to StaticType.BAG), expectErr( @@ -217,7 +225,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // shadow binding from local variable STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT x FROM b LET 1 AS x, 2 AS x", mapOf("b" to StaticType.BAG), expectErr( @@ -229,54 +237,67 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b", emptyMap(), - expectErr(ErrorCode.SEMANTIC_UNBOUND_BINDING, + expectErr( + ErrorCode.SEMANTIC_UNBOUND_BINDING, BINDING_NAME to "b", LINE_NUMBER to 1L, - COLUMN_NUMBER to 15L) + COLUMN_NUMBER to 15L + ) ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b,c", - mapOf("B" to StaticType.ANY, - "C" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_UNBOUND_BINDING, + mapOf( + "B" to StaticType.ANY, + "C" to StaticType.ANY + ), + expectErr( + ErrorCode.SEMANTIC_UNBOUND_BINDING, BINDING_NAME 
to "a", LINE_NUMBER to 1L, - COLUMN_NUMBER to 8L) + COLUMN_NUMBER to 8L + ) ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM \"b\"", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_UNBOUND_QUOTED_BINDING, + expectErr( + ErrorCode.SEMANTIC_UNBOUND_QUOTED_BINDING, BINDING_NAME to "b", LINE_NUMBER to 1L, - COLUMN_NUMBER to 15L) + COLUMN_NUMBER to 15L + ) ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b, c", - mapOf("B" to StaticType.ANY, - "C" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_UNBOUND_BINDING, + mapOf( + "B" to StaticType.ANY, + "C" to StaticType.ANY + ), + expectErr( + ErrorCode.SEMANTIC_UNBOUND_BINDING, BINDING_NAME to "a", LINE_NUMBER to 1L, - COLUMN_NUMBER to 8L) + COLUMN_NUMBER to 8L + ) ), // variable scoping within SELECT should resolve implicit lexical alias over global STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b", mapOf( "b" to StaticType.BAG, - "a" to StaticType.BAG), + "a" to StaticType.BAG + ), expectSubNode( PartiqlAst.build { path( @@ -284,100 +305,119 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { "b", partiqlAstCaseSensitive, partiqlAstLocalsFirst, - StaticType.ANY.toMetas() + metas(1, 8)), + StaticType.ANY.toMetas() + metas(1, 8) + ), listOf( pathExpr( lit(ion.newString("a").toIonElement(), metas(1, 8)), - partiqlAstCaseInSensitive)), - metas(1, 8)) + partiqlAstCaseInSensitive + ) + ), + metas(1, 8) + ) } ) ), // ambiguous binding introduced in FROM clause (same AS-binding introduced twice) STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT * FROM b AS b, b AS B", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, + expectErr( + ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, BINDING_NAME to "B", LINE_NUMBER to 1L, - COLUMN_NUMBER to 28L) + COLUMN_NUMBER to 28L + ) ), // ambiguous binding introduced in FROM clause (AS binding given same name as AT binding) STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT * FROM b AS b AT b", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, + expectErr( + ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, BINDING_NAME to "b", LINE_NUMBER to 1L, - COLUMN_NUMBER to 20L) + COLUMN_NUMBER to 20L + ) ), // ambiguous binding introduced in FROM clause (AS binding given same name as BY binding) STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT * FROM b AS b BY b", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, + expectErr( + ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, BINDING_NAME to "b", LINE_NUMBER to 1L, - COLUMN_NUMBER to 20L) + COLUMN_NUMBER to 20L + ) ), 
// ambiguous binding introduced in FROM clause (AT binding given same name as BY binding) STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT * FROM b AS x AT b BY b", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, + expectErr( + ErrorCode.SEMANTIC_AMBIGUOUS_BINDING, BINDING_NAME to "b", LINE_NUMBER to 1L, - COLUMN_NUMBER to 30L) + COLUMN_NUMBER to 30L + ) ), // join should not allow implicit attribute without schema STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b AS x, B AS y", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_UNBOUND_BINDING, + expectErr( + ErrorCode.SEMANTIC_UNBOUND_BINDING, BINDING_NAME to "a", LINE_NUMBER to 1L, - COLUMN_NUMBER to 8L) + COLUMN_NUMBER to 8L + ) ), // nested query should not allow implicit attribute as variable without schema STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b AS x WHERE EXISTS (SELECT y FROM x)", mapOf("B" to StaticType.ANY), - expectErr(ErrorCode.SEMANTIC_UNBOUND_BINDING, + expectErr( + ErrorCode.SEMANTIC_UNBOUND_BINDING, BINDING_NAME to "y", LINE_NUMBER to 1L, - COLUMN_NUMBER to 43L) + COLUMN_NUMBER to 43L + ) ), // local variable with same name as global should not shadow global in from source STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT 1 FROM a AS b, b AS c, c as d", - mapOf("a" to StaticType.ANY, - "b" to StaticType.ANY), + mapOf( + "a" to StaticType.ANY, + "b" to StaticType.ANY + ), expectVariableReferences( VarExpectation("a", 1, 15, StaticType.ANY, partiqlAstUnqualified), // The [VarExpectation] below proves that the "b" in the "b AS c" from source was resolved in the global scope. VarExpectation("b", 1, 23, StaticType.ANY, partiqlAstUnqualified), - VarExpectation("c", 1, 31, StaticType.ANY, partiqlAstLocalsFirst)) + VarExpectation("c", 1, 31, StaticType.ANY, partiqlAstLocalsFirst) + ) ), // @ causes the local b to be resolved instead of the global b. 
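Two resolution details are worth noting here: in the case just above, the bare `b` in `b AS c` still resolves to the global `b` (a local alias introduced earlier in the FROM clause does not shadow a global inside a from-source), whereas in the case that follows, writing `@b.c` forces locals-first resolution, so `@b` picks up the alias introduced by `b as b` rather than the global `B`.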
STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT 1 FROM b as b, @b.c", mapOf( "B" to StaticType.BAG @@ -390,7 +430,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // Group By should not be allowed STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT * FROM b as x GROUP BY x.name", mapOf("b" to StaticType.ANY), expectErr(ErrorCode.UNIMPLEMENTED_FEATURE, FEATURE_NAME to "GROUP BY") @@ -405,18 +445,21 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // DML happy paths STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM x INSERT INTO y << 'doesnt matter' >>", - mapOf("x" to StaticType.BAG, - "y" to StaticType.BOOL), + mapOf( + "x" to StaticType.BAG, + "y" to StaticType.BOOL + ), expectVariableReferences( VarExpectation("x", 1, 6, StaticType.BAG, partiqlAstUnqualified), - VarExpectation("x", 1, 20, StaticType.ANY, partiqlAstLocalsFirst)) - //No expectation for y because `FROM x INSERT INTO y ...` is transformed to `FROM x INSERT INTO x.y ...` + VarExpectation("x", 1, 20, StaticType.ANY, partiqlAstLocalsFirst) + ) + // No expectation for y because `FROM x INSERT INTO y ...` is transformed to `FROM x INSERT INTO x.y ...` ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "INSERT INTO x VALUE 5", mapOf("x" to StaticType.BAG), expectVariableReferences( @@ -425,19 +468,21 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM x INSERT INTO y VALUE 5", mapOf( "x" to StaticType.BAG, - "y" to StaticType.BOOL), + "y" to StaticType.BOOL + ), expectVariableReferences( VarExpectation("x", 1, 6, StaticType.BAG, partiqlAstUnqualified), - VarExpectation("x", 1, 20, StaticType.ANY, partiqlAstLocalsFirst)) - //No expectation for y because `FROM x INSERT INTO y ...` is transformed to `FROM x INSERT INTO x.y ...` + VarExpectation("x", 1, 20, StaticType.ANY, partiqlAstLocalsFirst) + ) + // No expectation for y because `FROM x INSERT INTO y ...` is transformed to `FROM x INSERT INTO x.y ...` ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM x AS y SET doesntmatter = 1", mapOf("x" to StaticType.BAG), expectVariableReferences( @@ -447,10 +492,12 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "DELETE FROM x WHERE y", - mapOf("x" to StaticType.BAG, - "y" to StaticType.BOOL), + mapOf( + "x" to StaticType.BAG, + "y" to StaticType.BOOL + ), 
expectVariableReferences( VarExpectation("x", 1, 13, StaticType.BAG, partiqlAstUnqualified), VarExpectation("x", 1, 21, StaticType.ANY, partiqlAstLocalsFirst) @@ -458,11 +505,13 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM x WHERE z REMOVE y", - mapOf("x" to StaticType.BAG, - "y" to StaticType.BOOL, - "z" to StaticType.INT), + mapOf( + "x" to StaticType.BAG, + "y" to StaticType.BOOL, + "z" to StaticType.INT + ), expectVariableReferences( VarExpectation("x", 1, 6, StaticType.BAG, partiqlAstUnqualified), VarExpectation("x", 1, 14, StaticType.ANY, partiqlAstLocalsFirst), @@ -471,7 +520,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM canines AS dogs, dogs AS d WHERE d.name = 'Timmy' SET d.colour = 'blue merle'", mapOf("canines" to StaticType.BAG), expectVariableReferences( @@ -483,7 +532,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM animals AS a, a.dogs AS d WHERE d.name = 'Timmy' SET d.colour = 'blue merle'", mapOf("animals" to StaticType.BAG), expectVariableReferences( @@ -496,17 +545,19 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // DML undefined variables. STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM y SET doesntmatter = 1", mapOf(), - expectErr(ErrorCode.SEMANTIC_UNBOUND_BINDING, + expectErr( + ErrorCode.SEMANTIC_UNBOUND_BINDING, BINDING_NAME to "y", LINE_NUMBER to 1L, - COLUMN_NUMBER to 6L) + COLUMN_NUMBER to 6L + ) ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "DELETE FROM x as y WHERE z", mapOf("x" to StaticType.BAG), expectVariableReferences( @@ -516,17 +567,17 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "UPDATE dogs as d SET name = 'Timmy' WHERE color = 'Blue merle'", mapOf("dogs" to StaticType.BAG), expectVariableReferences( VarExpectation("dogs", 1, 8, StaticType.BAG, partiqlAstUnqualified), VarExpectation("d", 1, 22, StaticType.ANY, partiqlAstLocalsFirst), - VarExpectation("d", 1, 43, StaticType.ANY, partiqlAstLocalsFirst)) + VarExpectation("d", 1, 43, StaticType.ANY, partiqlAstLocalsFirst) + ) ) ) - @Test @Parameters fun ddlTest(tc: STRTestCase) = runSTRTest(tc) @@ -537,7 +588,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // and this test ensures we don't treat [identifier] as if it were a normal variable. 
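The DDL cases below pass an empty expectVariableReferences(): the names in `DROP INDEX IDX_foo ON SomeTable` and `CREATE INDEX ON SomeTable (SomeColumn)` are schema identifiers rather than value bindings, so the transform is expected to leave them untouched and to produce no variable references for them at all.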
STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "DROP INDEX IDX_foo ON SomeTable", mapOf(), expectVariableReferences() @@ -547,7 +598,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // and this test ensures we don't treat [keys] as if it were a normal variable. STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "CREATE INDEX ON SomeTable (SomeColumn)", mapOf(), expectVariableReferences() @@ -564,38 +615,42 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // even though a global 'a' is defined, it is not accessible. // (need the JOIN because a is transformed to b.a when there is only one from source) // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT a FROM b, c", mapOf( "B" to StaticType.BAG, "A" to StaticType.BAG, "C" to StaticType.BAG ), - expectErr(ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, + expectErr( + ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, BINDING_NAME to "a", LINE_NUMBER to 1L, - COLUMN_NUMBER to 8L), + COLUMN_NUMBER to 8L + ), setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM) ), STRTestCase( // Verify that a shadowed global ("b") doesn't get resolved instead of illegal global access error. // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "select * from a as b, (select * from b)", mapOf( "a" to StaticType.BAG, "b" to StaticType.BAG ), - expectErr(ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, + expectErr( + ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, BINDING_NAME to "b", LINE_NUMBER to 1L, - COLUMN_NUMBER to 38L), + COLUMN_NUMBER to 38L + ), setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_IN_NESTED_QUERIES) ), STRTestCase( // basic happy path with PREVENT_GLOBALS_EXCEPT_IN_FROM // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT b1.a FROM b AS b1", mapOf( "B" to StaticType.BAG @@ -609,7 +664,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { STRTestCase( // nested happy case with PREVENT_GLOBALS_EXCEPT_IN_FROM // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT 1, (SELECT c1.d FROM c as c1) FROM b, (SELECT 2 FROM c)", mapOf( "B" to StaticType.BAG, @@ -626,50 +681,57 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { // multiple joins STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "select 1 from actors a, a.movies am, movies m", - mapOf("movies" to StaticType.BAG, - "actors" to StaticType.BAG), + mapOf( + "movies" to StaticType.BAG, + "actors" to StaticType.BAG + ), expectVariableReferences( 
VarExpectation("actors", 1, 15, StaticType.BAG, partiqlAstUnqualified), VarExpectation("a", 1, 25, StaticType.ANY, partiqlAstLocalsFirst), VarExpectation("movies", 1, 38, StaticType.BAG, partiqlAstUnqualified) ), - setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM)), + setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM) + ), STRTestCase( // failure case with PREVENT_GLOBALS_IN_NESTED_QUERIES though // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT 1, (SELECT c1.d FROM c as c1) FROM b", mapOf( "B" to StaticType.BAG, "C" to StaticType.BAG ), - expectErr(ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, + expectErr( + ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, BINDING_NAME to "c", LINE_NUMBER to 1L, - COLUMN_NUMBER to 29L), + COLUMN_NUMBER to 29L + ), setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_IN_NESTED_QUERIES) ), STRTestCase( // checking PREVENT_GLOBALS_IN_NESTED_QUERIES failure within outer FROM // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT 1 FROM b, (SELECT c1.d FROM c as c1)", mapOf( "B" to StaticType.BAG, "C" to StaticType.BAG ), - expectErr(ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, + expectErr( + ErrorCode.SEMANTIC_ILLEGAL_GLOBAL_VARIABLE_ACCESS, BINDING_NAME to "c", LINE_NUMBER to 1L, - COLUMN_NUMBER to 36L), + COLUMN_NUMBER to 36L + ), setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_IN_NESTED_QUERIES) ), STRTestCase( // nested happy case with PREVENT_GLOBALS_IN_NESTED_QUERIES // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "SELECT 1, (SELECT c1.d FROM c as c1) FROM b as b, @b.c as c1, (SELECT 2 FROM c1)", mapOf( "B" to StaticType.BAG, @@ -688,10 +750,12 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "select collies FROM dogs d", - mapOf("dogs" to StaticType.BAG, - "collies" to StaticType.BAG), + mapOf( + "dogs" to StaticType.BAG, + "collies" to StaticType.BAG + ), expectSubNode( PartiqlAst.build { path( @@ -699,22 +763,29 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { "d", partiqlAstCaseSensitive, partiqlAstLocalsFirst, - StaticType.ANY.toMetas() + metas(1, 8)), + StaticType.ANY.toMetas() + metas(1, 8) + ), listOf( pathExpr( lit(ion.newString("collies").toIonElement(), metas(1, 8)), - partiqlAstCaseInSensitive)), - metas(1, 8)) - }), + partiqlAstCaseInSensitive + ) + ), + metas(1, 8) + ) + } + ), setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM) ), // DML with PREVENT_GLOBALS_EXCEPT_IN_FROM STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "FROM dogs d INSERT INTO collies VALUE 'Timmy'", - mapOf("dogs" to StaticType.BAG, - "collies" to StaticType.BAG), + mapOf( + "dogs" to StaticType.BAG, + "collies" to StaticType.BAG + ), expectSubNode( 
PartiqlAst.build { path( @@ -722,34 +793,43 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { "d", partiqlAstCaseSensitive, partiqlAstLocalsFirst, - StaticType.ANY.toMetas() + metas(1, 25)), + StaticType.ANY.toMetas() + metas(1, 25) + ), listOf( pathExpr( lit(ion.newString("collies").toIonElement(), metas(1, 8)), - partiqlAstCaseInSensitive)), - metas(1, 25)) - }), - setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM)), + partiqlAstCaseInSensitive + ) + ), + metas(1, 25) + ) + } + ), + setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM) + ), STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "INSERT INTO dogs VALUE 'Timmy'", mapOf("dogs" to StaticType.BAG), expectVariableReferences( VarExpectation("dogs", 1, 13, StaticType.BAG, partiqlAstUnqualified) ), - setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM)), + setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM) + ), // captures the var ranging over dogs and implicitly prefixes id and owner STRTestCase( // 1 2 3 4 5 6 7 8 - //2345678901234567890123456789012345678901234567890123456789012345678901234567890 + // 2345678901234567890123456789012345678901234567890123456789012345678901234567890 "UPDATE dogs d SET name = 'Timmy' WHERE owner = 'Margaret'", mapOf("dogs" to StaticType.BAG), expectVariableReferences( VarExpectation("dogs", 1, 8, StaticType.BAG, partiqlAstUnqualified), VarExpectation("d", 1, 19, StaticType.ANY, partiqlAstLocalsFirst), - VarExpectation("d", 1, 40, StaticType.ANY, partiqlAstLocalsFirst)), - setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM)) + VarExpectation("d", 1, 40, StaticType.ANY, partiqlAstLocalsFirst) + ), + setOf(StaticTypeVisitorTransformConstraints.PREVENT_GLOBALS_EXCEPT_IN_FROM) + ) ) sealed class ResolveTestResult { @@ -765,8 +845,8 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { properties.forEach { (property, expectedValue) -> assertEquals( "${property.propertyName} in error doesn't match", - expectedValue, it.error.errorContext?.get(property)?.value) - + expectedValue, it.error.errorContext?.get(property)?.value + ) } } } @@ -792,13 +872,13 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { when (node) { is PartiqlAst.Expr.Id -> { val sourceLocationMeta = node.metas[SourceLocationMeta.TAG] as SourceLocationMeta? - ?: error("VariableReference '${node.name.text}' had no SourceLocationMeta") + ?: error("VariableReference '${node.name.text}' had no SourceLocationMeta") // Find a VarExpectation that matches the given VariableReference val matchingExpectation = remainingExpectations.firstOrNull { it.id == node.name.text && - it.line == sourceLocationMeta.lineNum && - it.charOffset == sourceLocationMeta.charOffset + it.line == sourceLocationMeta.lineNum && + it.charOffset == sourceLocationMeta.charOffset } ?: error("No expectation found for VariableReference ${node.name.text} at $sourceLocationMeta") remainingExpectations.remove(matchingExpectation) @@ -810,7 +890,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { ) val staticTypeMeta = node.metas[StaticTypeMeta.TAG] as StaticTypeMeta? 
- ?: error("VariableReference '${node.name.text}' at $sourceLocationMeta had no StaticTypeMeta") + ?: error("VariableReference '${node.name.text}' at $sourceLocationMeta had no StaticTypeMeta") assertEquals( "VariableReference ${node.name.text} at $sourceLocationMeta static type must match expectation", @@ -823,7 +903,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { } override fun walkExpr(node: PartiqlAst.Expr) { - //do not walk the name of a function call this should be a symbolic name in another namespace (AST is over generalized here) + // do not walk the name of a function call this should be a symbolic name in another namespace (AST is over generalized here) when { node is PartiqlAst.Expr.Call -> { visitExpr(node) @@ -853,15 +933,17 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { visitor.walkStatement(result.node) - if(remainingExpectations.any()) { + if (remainingExpectations.any()) { println("Unmet expectations:") remainingExpectations.forEach { println(it) } - fail("${remainingExpectations.size} variable expectations were not met.\n" + - "The first was: ${remainingExpectations.first()}\n" + - "See standard output for a complete list") + fail( + "${remainingExpectations.size} variable expectations were not met.\n" + + "The first was: ${remainingExpectations.first()}\n" + + "See standard output for a complete list" + ) } } } @@ -888,8 +970,10 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { } private fun metas(line: Long, column: Long, type: StaticType? = null): com.amazon.ionelement.api.MetaContainer = - (metaContainerOf(SourceLocationMeta(line, column)) + - (type?.let { metaContainerOf(StaticTypeMeta(it)) } ?: emptyMetaContainer())) + ( + metaContainerOf(SourceLocationMeta(line, column)) + + (type?.let { metaContainerOf(StaticTypeMeta(it)) } ?: emptyMetaContainer()) + ) private fun StaticType.toMetas(): com.amazon.ionelement.api.MetaContainer = metaContainerOf(StaticTypeMeta(this)) @@ -907,8 +991,7 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { val transformedAst = try { transformer.transformStatement(originalAst) - } - catch (e: SemanticException) { + } catch (e: SemanticException) { tc.handler(ResolveTestResult.Error(tc, e)) return } @@ -1004,11 +1087,22 @@ class StaticTypeVisitorTransformTests : VisitorTransformTestBase() { name = "foo", case = partiqlAstCaseInSensitive, qualifier = partiqlAstUnqualified - ).withMeta("staticType", StaticTypeMeta(ListType(IntType(metas = mapOf(ISL_META_KEY to listOf( - IonSchemaModel.build { - typeDefinition("bar", constraintList(typeConstraint(namedType("int", ionBool(false))))) - } - )))))) + ).withMeta( + "staticType", + StaticTypeMeta( + ListType( + IntType( + metas = mapOf( + ISL_META_KEY to listOf( + IonSchemaModel.build { + typeDefinition("bar", constraintList(typeConstraint(namedType("int", ionBool(false))))) + } + ) + ) + ) + ) + ) + ) } // note: we do not test v0 here because it's not able to store metas other than [SourceLocationMeta] diff --git a/lang/test/org/partiql/lang/eval/visitors/SubstitutionVisitorTransformTest.kt b/lang/test/org/partiql/lang/eval/visitors/SubstitutionVisitorTransformTest.kt index 7fc6ca28ca..5365de8780 100644 --- a/lang/test/org/partiql/lang/eval/visitors/SubstitutionVisitorTransformTest.kt +++ b/lang/test/org/partiql/lang/eval/visitors/SubstitutionVisitorTransformTest.kt @@ -18,8 +18,8 @@ import com.amazon.ionelement.api.emptyMetaContainer import com.amazon.ionelement.api.ionInt import org.junit.Test import 
org.partiql.lang.ast.SourceLocationMeta -import org.partiql.lang.domains.metaContainerOf import org.partiql.lang.domains.PartiqlAst +import org.partiql.lang.domains.metaContainerOf import kotlin.test.assertEquals import kotlin.test.assertNull @@ -70,5 +70,4 @@ class SubstitutionVisitorTransformTest { // (lit 3) should still be (lit 3) assertEquals(3, transformedExpr.toIonElement().values[1].longValue) } - } diff --git a/lang/test/org/partiql/lang/eval/visitors/VisitorTransformTestBase.kt b/lang/test/org/partiql/lang/eval/visitors/VisitorTransformTestBase.kt index c608eeb67d..9db2aefbc4 100644 --- a/lang/test/org/partiql/lang/eval/visitors/VisitorTransformTestBase.kt +++ b/lang/test/org/partiql/lang/eval/visitors/VisitorTransformTestBase.kt @@ -14,9 +14,9 @@ package org.partiql.lang.eval.visitors +import org.junit.jupiter.api.fail import org.partiql.lang.domains.PartiqlAst import org.partiql.lang.syntax.SqlParserTestBase -import org.junit.jupiter.api.fail /** Provides some basic functionality for parameterized testing implementation of [PartiqlAst.VisitorTransform]. */ abstract class VisitorTransformTestBase : SqlParserTestBase() { @@ -45,10 +45,10 @@ abstract class VisitorTransformTestBase : SqlParserTestBase() { assertEquals( "The second pass of ${transform.javaClass.name} pass should not change the AST", actualAst, - anotherActualAst) + anotherActualAst + ) } - /** * Parses [TransformTestCase.originalSql], then runs the specified transformers on the AST. * Parses [TransformTestCase.expectedSql], and asserts the transformed AST is equivalent to the expected AST. @@ -75,4 +75,4 @@ abstract class VisitorTransformTestBase : SqlParserTestBase() { fail("Expected block to not throw but it threw: $message", e) } } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/mappers/E2EMapperTests.kt b/lang/test/org/partiql/lang/mappers/E2EMapperTests.kt index 83484d3e55..9f596d797a 100644 --- a/lang/test/org/partiql/lang/mappers/E2EMapperTests.kt +++ b/lang/test/org/partiql/lang/mappers/E2EMapperTests.kt @@ -3,14 +3,14 @@ package org.partiql.lang.mappers import com.amazon.ionelement.api.ionBool import com.amazon.ionelement.api.ionInt import com.amazon.ionelement.api.loadAllElements -import org.partiql.ionschema.model.IonSchemaModel -import org.partiql.ionschema.model.toIsl -import org.partiql.ionschema.parser.parseSchema import org.junit.Test import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Assertions.assertThrows import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.MethodSource +import org.partiql.ionschema.model.IonSchemaModel +import org.partiql.ionschema.model.toIsl +import org.partiql.ionschema.parser.parseSchema import org.partiql.lang.types.AnyOfType import org.partiql.lang.types.AnyType import org.partiql.lang.types.BagType @@ -34,7 +34,7 @@ internal fun buildTypeDef(name: String? 
= null, vararg constraints: IonSchemaMod IonSchemaModel.build { typeDefinition(name, constraintList(constraints.toList())) } internal fun buildTypeConstraint(name: String, nullable: Boolean = false) = - IonSchemaModel.build { typeConstraint(namedType(name, ionBool(nullable)))} + IonSchemaModel.build { typeConstraint(namedType(name, ionBool(nullable))) } private const val islHeader = "schema_header::{imports: [{ id: \"partiql.isl\" }]}" private const val islFooter = "schema_footer::{}" @@ -50,70 +50,76 @@ internal class MapperE2ETestCase( } internal class E2EMapperTests { - + @ParameterizedTest @MethodSource("parametersForE2ETests") fun tests(tc: MapperE2ETestCase) { val sourceIsl = tc.sourceIsl val staticType = tc.expectedStaticType val expectedIsl = tc.expectedIsl - + verifyAssertions(sourceIsl, staticType) verifyAssertions(staticType, expectedIsl) } - + private fun verifyAssertions(sourceIsl: String, expectedType: StaticType) { // Create ISL domain model from raw isl val schema = parseSchema(loadAllElements(sourceIsl).toList()) - + // Convert to StaticType val actualType = StaticTypeMapper(schema).toStaticType(typeName) - + // Create expected type with metas - if the test already provides metas, use them val expectedTypeWithMetas = if (expectedType.metas.containsKey(ISL_META_KEY)) { expectedType } else { expectedType.withMetas( - mapOf(ISL_META_KEY to schema.statements - .filterIsInstance() - .map { it.typeDef }) + mapOf( + ISL_META_KEY to schema.statements + .filterIsInstance() + .map { it.typeDef } + ) ) } - + // Assert StaticType is as expected // Throwing AssertionError in order to print a more readable, multi-line message // instead of a single-line message that [assertEquals] displays if (expectedTypeWithMetas != actualType) { - throw AssertionError(""" + throw AssertionError( + """ StaticType must match the expected. Expected: $expectedTypeWithMetas Actual: $actualType - """.trimIndent()) + """.trimIndent() + ) } } - + private fun verifyAssertions(staticType: StaticType, expectedIsl: String) { // Create ISL domain model from input ISL val expectedSchema = parseSchema(loadAllElements(islHeader + expectedIsl + islFooter).toList()) - + // Map StaticType to ISL domain model val actualSchema = IonSchemaMapper(staticType).toIonSchema(typeName) - + // Ensure domain model is as expected. This assertion checks for semantic equivalence. // Throwing AssertionError in order to print a more readable, multi-line message // instead of a single-line message that [assertEquals] displays if (expectedSchema != actualSchema) { - throw AssertionError(""" + throw AssertionError( + """ Parsed object model must match the expected. 
Expected ISL: ${expectedSchema.toIsl()} Actual ISL: ${actualSchema.toIsl()} Expected schema: $expectedSchema Actual schema: $actualSchema - """.trimIndent()) + """.trimIndent() + ) } } - + companion object { @JvmStatic fun parametersForE2ETests() = basicSingleTypeTests() + @@ -128,27 +134,31 @@ internal class E2EMapperTests { bagWithCustomElementTests() + structWithCustomFieldTests() } - + @Test fun `field of MissingType should be excluded from ISL`() { verifyAssertions( - staticType = StructType(mapOf( - "a" to StaticType.MISSING - )), + staticType = StructType( + mapOf( + "a" to StaticType.MISSING + ) + ), expectedIsl = "type::{ name: $typeName, type: struct, fields: {} }" ) } - + @Test fun `field of AnyType should return field as optional and nullable in ISL`() { verifyAssertions( - staticType = StructType(mapOf( - "a" to StaticType.ANY - )), + staticType = StructType( + mapOf( + "a" to StaticType.ANY + ) + ), expectedIsl = "type::{ name: $typeName, type: struct, fields: { a: nullable::any } }" ) } - + @Test fun `verify ISL can be created without StaticType metas too`() { verifyAssertions( @@ -156,21 +166,21 @@ internal class E2EMapperTests { expectedIsl = "type::{ name: $typeName, type: list, element: string }" ) } - + @Test fun `type to be mapped does not exist in schema`() { val isl = "type::{ name: $typeName, type: string }" val schema = parseSchema(loadAllElements(isl).toList()) assertTypeNotFoundException(schema, unavailableType) } - + @Test fun `referenced top level type does not exist in schema`() { val isl = "type::{ name: $typeName, type: $unavailableType }" val schema = parseSchema(loadAllElements(isl).toList()) assertTypeNotFoundException(schema, typeName) } - + private fun assertTypeNotFoundException(schema: IonSchemaModel.Schema, typeName: String) { val exception = assertThrows(TypeNotFoundException::class.java) { StaticTypeMapper(schema).toStaticType(typeName) @@ -281,25 +291,39 @@ internal fun basicSingleTypeTests() = listOf( MapperE2ETestCase( "type::{ name: $typeName, type: nullable::{ type: string }}", StaticType.unionOf( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("string")) - ))), + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("string")) + ) + ) + ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(typeName, IonSchemaModel.build { typeConstraint( - inlineType(buildTypeDef(null, buildTypeConstraint("string")), ionBool(true)) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + IonSchemaModel.build { + typeConstraint( + inlineType(buildTypeDef(null, buildTypeConstraint("string")), ionBool(true)) + ) + } + ) ) - }) - )) + ) ), "type::{ name: $typeName, type: nullable::string }" ), // symbol type with codepoint_length constraint MapperE2ETestCase( "type::{ name: $typeName, type: symbol, codepoint_length: 5 }", - SymbolType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(typeName, buildTypeConstraint("symbol"), IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }) - ))) + SymbolType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(typeName, buildTypeConstraint("symbol"), IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }) + ) + ) + ) ), // clob MapperE2ETestCase( @@ -408,9 +432,14 @@ internal fun basicSingleTypeTests() = listOf( type::{ name: $typeName, type: string } type::{ name: $typeName, type: list, element: string } """, - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to 
listOf( - buildTypeDef(typeName, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) - ))), + ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(typeName, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) + ) + ) + ), "type::{ name: $typeName, type: list, element: string }" ), // recursive type @@ -420,25 +449,30 @@ internal fun basicSingleTypeTests() = listOf( """, StaticType.unionOf( StaticType.STRING, - ListType(StaticType.ANY, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - typeName, - IonSchemaModel.build { - anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType(typeName, ionBool(false))) } - ), - ionBool(false) - ) - ) - } - ), - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType(typeName, ionBool(false))) }) - ))) + ListType( + StaticType.ANY, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType(typeName, ionBool(false))) } + ), + ionBool(false) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType(typeName, ionBool(false))) }) + ) + ) + ) ) ) ) @@ -447,79 +481,110 @@ internal fun basicAnyOfTests() = listOf( // named and inline types MapperE2ETestCase( "type::{ name: $typeName, any_of: [int, string, {type: list, element: string}] }", - AnyOfType(setOf( - StaticType.INT, - StaticType.STRING, - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } + AnyOfType( + setOf( + StaticType.INT, + StaticType.STRING, + ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } + ) + ) + ) ) - ))) - )) + ) + ) ), // named and inline type with nested any_of constraint MapperE2ETestCase( "type::{ name: $typeName, any_of: [int, string, {type: list, element: { any_of: [int, string] }}] }", - AnyOfType(setOf( - StaticType.INT, - StaticType.STRING, - ListType( - AnyOfType(setOf(StaticType.INT, StaticType.STRING), metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) } - ) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(inlineType( + AnyOfType( + setOf( + StaticType.INT, + StaticType.STRING, + ListType( + AnyOfType( + setOf(StaticType.INT, StaticType.STRING), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) } + ) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( buildTypeDef( null, - anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) - ), - ionBool(false) - ))} + buildTypeConstraint("list"), + IonSchemaModel.build { + element( + inlineType( + buildTypeDef( + null, + anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) + ), + 
ionBool(false) + ) + ) + } + ) + ) ) - )) + ) ) - )) + ) ), // nullable, named and inline type MapperE2ETestCase( "type::{ name: $typeName, any_of: [nullable::int, nullable::{type: list, element: string}] }", - AnyOfType(setOf( - StaticType.unionOf(StaticType.INT, StaticType.NULL), - StaticType.unionOf( - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } - ) - ))), - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } + AnyOfType( + setOf( + StaticType.unionOf(StaticType.INT, StaticType.NULL), + StaticType.unionOf( + ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } + ) + ) + ) + ), + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } + ) + ) ) - )) + ) ) - )) + ) ), // named types (zero nullable types) MapperE2ETestCase( "type::{ name: $typeName, any_of: [int, string, float] }", - AnyOfType(setOf( - StaticType.INT, - StaticType.STRING, - StaticType.FLOAT - )) + AnyOfType( + setOf( + StaticType.INT, + StaticType.STRING, + StaticType.FLOAT + ) + ) ), // some nullable types // This tests that if a single type within a union type is nullable, then all types are essentially nullable @@ -527,19 +592,23 @@ internal fun basicAnyOfTests() = listOf( // This is based on the understanding that, for all practical purposes, null.string is equivalent to null, hence null.string is an acceptable value MapperE2ETestCase( "type::{ name: $typeName, any_of: [nullable::int, string] }", - AnyOfType(setOf( - StaticType.unionOf(StaticType.NULL, StaticType.INT), - StaticType.STRING - )), + AnyOfType( + setOf( + StaticType.unionOf(StaticType.NULL, StaticType.INT), + StaticType.STRING + ) + ), "type::{ name: $typeName, any_of: [nullable::int, nullable::string] }" ), // all nullable types MapperE2ETestCase( "type::{ name: $typeName, any_of: [nullable::int, nullable::string] }", - AnyOfType(setOf( - StaticType.unionOf(StaticType.NULL, StaticType.INT), - StaticType.unionOf(StaticType.NULL, StaticType.STRING) - )) + AnyOfType( + setOf( + StaticType.unionOf(StaticType.NULL, StaticType.INT), + StaticType.unionOf(StaticType.NULL, StaticType.STRING) + ) + ) ), // mix of nullable, named and inline types MapperE2ETestCase( @@ -548,12 +617,19 @@ internal fun basicAnyOfTests() = listOf( StaticType.unionOf(StaticType.NULL, StaticType.INT), StaticType.STRING, FloatType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("float"))))), - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to - listOf(buildTypeDef(null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } - )) - )) + ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to + listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } + ) + ) + ) + ) ), "type::{ name: $typeName, any_of:[nullable::int, nullable::string, nullable::float, nullable::{type: list, element: string}] }" ), @@ -592,19 +668,25 @@ internal fun listTests() = listOf( "type::{ name: 
$typeName, type: list, element: nullable::{type: string}}", ListType( StaticType.unionOf( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string") + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string") + ) + ) ) - ))), + ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string") + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string") + ) ) - )) + ) ) ), "type::{ name: $typeName, type: list, element: nullable::string }" @@ -612,11 +694,13 @@ internal fun listTests() = listOf( // Same as above, just another way of expressing input in ISL MapperE2ETestCase( "type::{ name: $typeName, type: list, element: { type: nullable::int } }", - ListType(StaticType.unionOf( - StaticType.NULL, - StaticType.INT, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("int", true)))) - )), + ListType( + StaticType.unionOf( + StaticType.NULL, + StaticType.INT, + metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("int", true)))) + ) + ), "type::{ name: $typeName, type: list, element: nullable::int }" ), // element that has a constraint @@ -625,11 +709,15 @@ internal fun listTests() = listOf( ListType( StringType( StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ) ) ), @@ -640,20 +728,38 @@ internal fun listTests() = listOf( StaticType.unionOf( StringType( StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - IonSchemaModel.build { typeConstraint(inlineType(typeDefinition(null, constraintList( - typeConstraint(namedType("string", ionBool(false))), - codepointLength(equalsNumber(ionInt(5))) - )), ionBool(true))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { + typeConstraint( + inlineType( + typeDefinition( + null, + constraintList( + typeConstraint(namedType("string", ionBool(false))), + codepointLength(equalsNumber(ionInt(5))) + ) + ), + ionBool(true) + ) + ) + } + ) + ) + ) ) ), "type::{ name: $typeName, type: list, element: nullable::{type: string, codepoint_length:5}}" @@ -664,13 +770,15 @@ internal fun listTests() = listOf( ListType( StringType( StringType.StringLengthConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), 
inclusive(ionInt(2048))))) } + ) ) - )) + ) ) ) ), @@ -680,14 +788,16 @@ internal fun listTests() = listOf( ListType( DecimalType( DecimalType.PrecisionScaleConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, - IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, + IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + ) ) - )) + ) ) ) ), @@ -696,57 +806,89 @@ internal fun listTests() = listOf( ListType( IntType( IntType.IntRangeConstraint.INT4, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) - } - ) - )) - ) - ) - ), - MapperE2ETestCase( - "type::{ name: $typeName, type: list, element: {type: nullable::{type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]}}}", + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ) + ) + ) + ) + ), + MapperE2ETestCase( + "type::{ name: $typeName, type: list, element: {type: nullable::{type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]}}}", ListType( StaticType.unionOf( IntType( IntType.IntRangeConstraint.INT4, - metas = mapOf(ISL_META_KEY to listOf( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ) + ) + ), + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( buildTypeDef( null, - buildTypeConstraint("int"), IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) + typeConstraint( + inlineType( + typeDefinition( + null, + constraintList( + typeConstraint(namedType("int", ionBool(false))), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ), + ionBool(true) + ) + ) } ) - )) - ), - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { - typeConstraint(inlineType(typeDefinition(null, constraintList( - typeConstraint(namedType("int", ionBool(false))), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) - })), ionBool(true) - )) - } ) - )) + ) ) ), "type::{ name: $typeName, type: list, element: nullable::{type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]}}" @@ -757,9 
+899,15 @@ internal fun listTests() = listOf( type::{ name: bar, type: string } type::{ name: $typeName, type: list, element: bar } """, - ListType(StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - )))) + ListType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ) + ) ), // element as inline top-level type MapperE2ETestCase( @@ -767,9 +915,15 @@ internal fun listTests() = listOf( type::{ name: bar, type: string } type::{ name: $typeName, type: list, element: { type: bar } } """, - ListType(StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")), - buildTypeDef(null, buildTypeConstraint("bar"))))) + ListType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")), + buildTypeDef(null, buildTypeConstraint("bar")) + ) + ) + ) ), """ type::{ name: bar, type: string } @@ -782,15 +936,23 @@ internal fun listTests() = listOf( type::{ name: bar, type: string } type::{ name: $typeName, type: list, element: nullable::bar } """, - ListType(StaticType.unionOf( - StaticType.NULL, - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - )) - )) + ListType( + StaticType.unionOf( + StaticType.NULL, + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ) + ) ), // Same as above, with more constraints on the custom type MapperE2ETestCase( @@ -798,38 +960,52 @@ internal fun listTests() = listOf( type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: list, element: nullable::bar } """, - ListType(StaticType.unionOf( - StaticType.NULL, - StringType( - StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ListType( + StaticType.unionOf( + StaticType.NULL, + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) ) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } - ) - )) - )) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) + ) + ) + ) ), // element as collection type MapperE2ETestCase( "type::{ name: $typeName, type: list, element: { type: list, element: int } }", - ListType(ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - 
IonSchemaModel.build { element(namedType("int", ionBool(false))) } + ListType( + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) } + ) + ) + ) ) - )))) + ) ), // element as collection type with nullable element MapperE2ETestCase( @@ -837,27 +1013,34 @@ internal fun listTests() = listOf( ListType( ListType( StaticType.unionOf(StaticType.STRING, StaticType.NULL), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(true))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(true))) } + ) ) - )) + ) ) ) ), // list with other constraints MapperE2ETestCase( "type::{ name: $typeName, type: list, element: int, contains: [1, 5] }", - ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - typeName, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) }, - IonSchemaModel.build { contains(listOf(ionInt(1), ionInt(5))) } + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) }, + IonSchemaModel.build { contains(listOf(ionInt(1), ionInt(5))) } + ) + ) ) - ))) + ) ), // element as 'any' MapperE2ETestCase( @@ -868,12 +1051,25 @@ internal fun listTests() = listOf( // element as any_of MapperE2ETestCase( "type::{ name: $typeName, type: list, element: { any_of: [int, string] } }", - ListType(AnyOfType(setOf(StaticType.INT, StaticType.STRING), metas = mapOf(ISL_META_KEY to - listOf(buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - namedType("string", ionBool(false)) - )})) - ))) + ListType( + AnyOfType( + setOf(StaticType.INT, StaticType.STRING), + metas = mapOf( + ISL_META_KEY to + listOf( + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ) ), // element as any_of with custom type MapperE2ETestCase( @@ -881,17 +1077,28 @@ internal fun listTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: list, element: { any_of: [bar, string] } } """, - ListType(AnyOfType(setOf( - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - StaticType.STRING), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("bar", ionBool(false)), - namedType("string", ionBool(false)) - )}) - ) - ))) + ListType( + AnyOfType( + setOf( + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + StaticType.STRING + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("bar", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ) ) ) @@ -917,19 +1124,25 @@ internal fun sexpTests() = listOf( "type::{ name: $typeName, type: sexp, element: nullable::{type: string}}", SexpType( StaticType.unionOf( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - 
buildTypeConstraint("string") + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string") + ) + ) ) - ))), + ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string") + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string") + ) ) - )) + ) ) ), "type::{ name: $typeName, type: sexp, element: nullable::string }" @@ -937,11 +1150,13 @@ internal fun sexpTests() = listOf( // Same as above, just another way of expressing input in ISL MapperE2ETestCase( "type::{ name: $typeName, type: sexp, element: { type: nullable::int } }", - SexpType(StaticType.unionOf( - StaticType.NULL, - StaticType.INT, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("int", true)))) - )), + SexpType( + StaticType.unionOf( + StaticType.NULL, + StaticType.INT, + metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("int", true)))) + ) + ), "type::{ name: $typeName, type: sexp, element: nullable::int }" ), // element that has a constraint @@ -950,11 +1165,15 @@ internal fun sexpTests() = listOf( SexpType( StringType( StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ) ) ), @@ -965,20 +1184,38 @@ internal fun sexpTests() = listOf( StaticType.unionOf( StringType( StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - IonSchemaModel.build { typeConstraint(inlineType(typeDefinition(null, constraintList( - typeConstraint(namedType("string", ionBool(false))), - codepointLength(equalsNumber(ionInt(5))) - )), ionBool(true))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { + typeConstraint( + inlineType( + typeDefinition( + null, + constraintList( + typeConstraint(namedType("string", ionBool(false))), + codepointLength(equalsNumber(ionInt(5))) + ) + ), + ionBool(true) + ) + ) + } + ) + ) + ) ) ), "type::{ name: $typeName, type: sexp, element: nullable::{type: string, codepoint_length:5}}" @@ -989,13 +1226,15 @@ internal fun sexpTests() = listOf( SexpType( StringType( StringType.StringLengthConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + ) ) - )) + ) ) ) ), @@ -1005,14 +1244,16 @@ internal fun sexpTests() = listOf( SexpType( DecimalType( 
DecimalType.PrecisionScaleConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, - IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, + IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + ) ) - )) + ) ) ) ), @@ -1021,18 +1262,26 @@ internal fun sexpTests() = listOf( SexpType( IntType( IntType.IntRangeConstraint.INT4, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) - } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) ) - )) + ) ) ) ), @@ -1042,36 +1291,60 @@ internal fun sexpTests() = listOf( StaticType.unionOf( IntType( IntType.IntRangeConstraint.INT4, - metas = mapOf(ISL_META_KEY to listOf( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ) + ) + ), + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( buildTypeDef( null, - buildTypeConstraint("int"), IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) + typeConstraint( + inlineType( + typeDefinition( + null, + constraintList( + typeConstraint(namedType("int", ionBool(false))), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ), + ionBool(true) + ) + ) } ) - )) - ), - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { - typeConstraint(inlineType(typeDefinition(null, constraintList( - typeConstraint(namedType("int", ionBool(false))), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) - })), ionBool(true) - )) - } ) - )) + ) ) ), "type::{ name: $typeName, type: sexp, element: nullable::{type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]}}" @@ -1082,19 +1355,31 @@ internal fun sexpTests() = listOf( type::{ name: bar, type: string } type::{ name: $typeName, type: sexp, element: bar } """, - SexpType(StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - )))) - ), - // element as inline top-level type - MapperE2ETestCase( - """ - type::{ name: bar, type: string } + SexpType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( 
+ buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ) + ) + ), + // element as inline top-level type + MapperE2ETestCase( + """ + type::{ name: bar, type: string } type::{ name: $typeName, type: sexp, element: { type: bar } } """, - SexpType(StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")), - buildTypeDef(null, buildTypeConstraint("bar"))))) + SexpType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")), + buildTypeDef(null, buildTypeConstraint("bar")) + ) + ) + ) ), """ type::{ name: bar, type: string } @@ -1107,15 +1392,23 @@ internal fun sexpTests() = listOf( type::{ name: bar, type: string } type::{ name: $typeName, type: sexp, element: nullable::bar } """, - SexpType(StaticType.unionOf( - StaticType.NULL, - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - )) - )) + SexpType( + StaticType.unionOf( + StaticType.NULL, + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ) + ) ), // Same as above, with more constraints on the custom type MapperE2ETestCase( @@ -1123,38 +1416,52 @@ internal fun sexpTests() = listOf( type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: sexp, element: nullable::bar } """, - SexpType(StaticType.unionOf( - StaticType.NULL, - StringType( - StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + SexpType( + StaticType.unionOf( + StaticType.NULL, + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) ) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } - ) - )) - )) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) + ) + ) + ) ), // element as collection type MapperE2ETestCase( "type::{ name: $typeName, type: sexp, element: { type: list, element: int } }", - SexpType(ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) } + SexpType( + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) } + ) + ) + ) ) - )))) + ) ), // element as collection type with nullable element MapperE2ETestCase( @@ 
-1162,27 +1469,34 @@ internal fun sexpTests() = listOf( SexpType( ListType( StaticType.unionOf(StaticType.STRING, StaticType.NULL), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(true))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(true))) } + ) ) - )) + ) ) ) ), // sexp with other constraints MapperE2ETestCase( "type::{ name: $typeName, type: sexp, element: int, contains: [1, 5] }", - SexpType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - typeName, - buildTypeConstraint("sexp"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) }, - IonSchemaModel.build { contains(listOf(ionInt(1), ionInt(5))) } + SexpType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + buildTypeConstraint("sexp"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) }, + IonSchemaModel.build { contains(listOf(ionInt(1), ionInt(5))) } + ) + ) ) - ))) + ) ), // element as 'any' MapperE2ETestCase( @@ -1193,12 +1507,25 @@ internal fun sexpTests() = listOf( // element as any_of MapperE2ETestCase( "type::{ name: $typeName, type: sexp, element: { any_of: [int, string] } }", - SexpType(AnyOfType(setOf(StaticType.INT, StaticType.STRING), metas = mapOf(ISL_META_KEY to - listOf(buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - namedType("string", ionBool(false)) - )})) - ))) + SexpType( + AnyOfType( + setOf(StaticType.INT, StaticType.STRING), + metas = mapOf( + ISL_META_KEY to + listOf( + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ) ), // element as any_of with custom type MapperE2ETestCase( @@ -1206,17 +1533,28 @@ internal fun sexpTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: sexp, element: { any_of: [bar, string] } } """, - SexpType(AnyOfType(setOf( - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - StaticType.STRING), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("bar", ionBool(false)), - namedType("string", ionBool(false)) - )}) - ) - ))) + SexpType( + AnyOfType( + setOf( + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + StaticType.STRING + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("bar", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ) ) ) @@ -1242,19 +1580,25 @@ internal fun bagTests() = listOf( "type::{ name: $typeName, type: bag, element: nullable::{type: string}}", BagType( StaticType.unionOf( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string") + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string") + ) + ) ) - ))), + ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string") + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string") + ) ) - )) + ) ) ), 
"type::{ name: $typeName, type: bag, element: nullable::string }" @@ -1262,11 +1606,13 @@ internal fun bagTests() = listOf( // Same as above, just another way of expressing input in ISL MapperE2ETestCase( "type::{ name: $typeName, type: bag, element: { type: nullable::int } }", - BagType(StaticType.unionOf( - StaticType.NULL, - StaticType.INT, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("int", true)))) - )), + BagType( + StaticType.unionOf( + StaticType.NULL, + StaticType.INT, + metas = mapOf(ISL_META_KEY to listOf(buildTypeDef(null, buildTypeConstraint("int", true)))) + ) + ), "type::{ name: $typeName, type: bag, element: nullable::int }" ), // element that has a constraint @@ -1275,11 +1621,15 @@ internal fun bagTests() = listOf( BagType( StringType( StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ) ) ), @@ -1290,20 +1640,38 @@ internal fun bagTests() = listOf( StaticType.unionOf( StringType( StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ), StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - IonSchemaModel.build { typeConstraint(inlineType(typeDefinition(null, constraintList( - typeConstraint(namedType("string", ionBool(false))), - codepointLength(equalsNumber(ionInt(5))) - )), ionBool(true))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { + typeConstraint( + inlineType( + typeDefinition( + null, + constraintList( + typeConstraint(namedType("string", ionBool(false))), + codepointLength(equalsNumber(ionInt(5))) + ) + ), + ionBool(true) + ) + ) + } + ) + ) + ) ) ), "type::{ name: $typeName, type: bag, element: nullable::{type: string, codepoint_length:5}}" @@ -1314,13 +1682,15 @@ internal fun bagTests() = listOf( BagType( StringType( StringType.StringLengthConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + ) ) - )) + ) ) ) ), @@ -1330,14 +1700,16 @@ internal fun bagTests() = listOf( BagType( DecimalType( DecimalType.PrecisionScaleConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, - IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + 
buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, + IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + ) ) - )) + ) ) ) ), @@ -1346,18 +1718,26 @@ internal fun bagTests() = listOf( BagType( IntType( IntType.IntRangeConstraint.INT4, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) - } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) ) - )) + ) ) ) ), @@ -1367,36 +1747,60 @@ internal fun bagTests() = listOf( StaticType.unionOf( IntType( IntType.IntRangeConstraint.INT4, - metas = mapOf(ISL_META_KEY to listOf( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ) + ) + ), + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( buildTypeDef( null, - buildTypeConstraint("int"), IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) + typeConstraint( + inlineType( + typeDefinition( + null, + constraintList( + typeConstraint(namedType("int", ionBool(false))), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Int.MIN_VALUE.toLong())), + inclusive(ionInt(Int.MAX_VALUE.toLong())) + ) + ) + ) + ) + } + ) + ), + ionBool(true) + ) + ) } ) - )) - ), - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { - typeConstraint(inlineType(typeDefinition(null, constraintList( - typeConstraint(namedType("int", ionBool(false))), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Int.MIN_VALUE.toLong())), - inclusive(ionInt(Int.MAX_VALUE.toLong())) - )))) - })), ionBool(true) - )) - } ) - )) + ) ) ), "type::{ name: $typeName, type: bag, element: nullable::{type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]}}" @@ -1404,13 +1808,20 @@ internal fun bagTests() = listOf( // element as collection type MapperE2ETestCase( "type::{ name: $typeName, type: bag, element: { type: list, element: int } }", - BagType(ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) } + BagType( + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) } + ) + ) + ) ) - )))) + ) ), // element as collection type with nullable element MapperE2ETestCase( @@ -1418,27 +1829,34 @@ internal fun bagTests() = listOf( BagType( ListType( StaticType.unionOf(StaticType.STRING, StaticType.NULL), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - 
null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(true))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(true))) } + ) ) - )) + ) ) ) ), // list with other constraints MapperE2ETestCase( "type::{ name: $typeName, type: bag, element: int, contains: [1, 5] }", - BagType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - typeName, - buildTypeConstraint("bag"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) }, - IonSchemaModel.build { contains(listOf(ionInt(1), ionInt(5))) } + BagType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + buildTypeConstraint("bag"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) }, + IonSchemaModel.build { contains(listOf(ionInt(1), ionInt(5))) } + ) + ) ) - ))) + ) ), // element as 'any' MapperE2ETestCase( @@ -1449,12 +1867,25 @@ internal fun bagTests() = listOf( // element as any_of MapperE2ETestCase( "type::{ name: $typeName, type: bag, element: { any_of: [int, string] } }", - BagType(AnyOfType(setOf(StaticType.INT, StaticType.STRING), metas = mapOf(ISL_META_KEY to - listOf(buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - namedType("string", ionBool(false)) - )})) - ))) + BagType( + AnyOfType( + setOf(StaticType.INT, StaticType.STRING), + metas = mapOf( + ISL_META_KEY to + listOf( + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ) ), // element as any_of with custom type MapperE2ETestCase( @@ -1462,19 +1893,30 @@ internal fun bagTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: bag, element: { any_of: [bar, string] } } """, - BagType(AnyOfType(setOf( - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - StaticType.STRING), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("bar", ionBool(false)), - namedType("string", ionBool(false)) - )}) - ) - ))) - ), - // element as struct + BagType( + AnyOfType( + setOf( + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + StaticType.STRING + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("bar", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ) + ), + // element as struct MapperE2ETestCase( """ type::{ name: $typeName, type: bag, element: { @@ -1491,74 +1933,108 @@ internal fun bagTests() = listOf( "a" to StaticType.unionOf(StaticType.INT, StaticType.NULL, StaticType.MISSING), "b" to StaticType.unionOf( StaticType.INT, - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } - ) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - inlineType( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } - ), ionBool(false) + ListType( + StaticType.STRING, + 
metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } ) - ) }, - IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + inlineType( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } + ), + ionBool(false) + ) + ) + }, + IonSchemaModel.build { occurs(occursRequired()) } + ) ) - )) - ), - "c" to StaticType.unionOf(StaticType.INT, StaticType.STRING, StaticType.MISSING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - namedType("string", ionBool(false)) - ) } ) - ))) - ), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("struct"), - IonSchemaModel.build { fields( - field("a", namedType("int", ionBool(true))), - field("b", inlineType( + ), + "c" to StaticType.unionOf( + StaticType.INT, StaticType.STRING, StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( buildTypeDef( null, - IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ) + ) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("struct"), + IonSchemaModel.build { + fields( + field("a", namedType("int", ionBool(true))), + field( + "b", inlineType( buildTypeDef( null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) } - ), ionBool(false) + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + inlineType( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) } + ), + ionBool(false) + ) + ) + }, + IonSchemaModel.build { occurs(occursRequired()) } + ), + ionBool(false) ) - ) }, - IonSchemaModel.build { occurs(occursRequired()) } - ), ionBool(false) - )), - field("c", inlineType( - buildTypeDef( - null, - IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - namedType("string", ionBool(false)) - ) } - ), ionBool(false) - )) - ) } + ), + field( + "c", + inlineType( + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + namedType("string", ionBool(false)) + ) + } + ), + ionBool(false) + ) + ) + ) + } + ) ) - )) + ) ) ) ) @@ -1573,179 +2049,261 @@ internal fun structTests() = listOf( // single field, inline type MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: {type: int} } }", - StructType(mapOf("a" to StaticType.unionOf( - IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("int")) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("int")) - )) - ))), + StructType( + mapOf( + "a" to StaticType.unionOf( + IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("int")) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("int")) + ) + ) + ) + ) + ), "type::{ name: $typeName, type: struct, fields: { a: int } }" ), // single field, type with constraint MapperE2ETestCase( "type::{ name: $typeName, type: 
struct, fields: { a: {type: string, codepoint_length: range::[0, 5]} } }", - StructType(mapOf("a" to StaticType.unionOf( - StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } - ) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } - ) - )) + StructType( + mapOf( + "a" to StaticType.unionOf( + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + ) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + ) + ) + ) + ) + ) ) - )) ), MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: {type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]} } }", - StructType(mapOf("a" to StaticType.unionOf( - IntType(IntType.IntRangeConstraint.INT4, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } - ) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } - ) - )) - ))) + StructType( + mapOf( + "a" to StaticType.unionOf( + IntType( + IntType.IntRangeConstraint.INT4, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } + ) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } + ) + ) + ) + ) + ) + ) ), // single field, required, type with constraint MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: {type: string, codepoint_length: range::[0, 5], occurs: required} } }", - StructType(mapOf("a" to StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) }, - IonSchemaModel.build { occurs(occursRequired()) } + StructType( + mapOf( + "a" to StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { 
codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) ) - ))))) + ) ), MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: {type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}], occurs: required} } }", - StructType(mapOf("a" to IntType(IntType.IntRangeConstraint.INT4, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) }, - IonSchemaModel.build { occurs(occursRequired()) } + StructType( + mapOf( + "a" to IntType( + IntType.IntRangeConstraint.INT4, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) ) - ))))) + ) ), // single field, nullable type with constraint MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: nullable::{type: string, codepoint_length: range::[0, 5]} } }", - StructType(mapOf("a" to StaticType.unionOf( - StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } - ) - ))), - StaticType.MISSING, - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + StructType( + mapOf( + "a" to StaticType.unionOf( + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + ) + ) + ) + ), + StaticType.MISSING, + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + ) + ) + ) ) - )) - ))) + ) + ) ), MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: nullable::{type: string, codepoint_length: range::[1, 2048]}}}", - StructType(mapOf( - "a" to StaticType.unionOf( - StringType( - StringType.StringLengthConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + StructType( + mapOf( + "a" to StaticType.unionOf( + StringType( + StringType.StringLengthConstraint.Unconstrained, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + ) + ) + ) + ), + StaticType.NULL, + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { 
codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + ) ) - )) - ), - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } ) - )) - )) + ) + ) ) ), MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: nullable::{type: decimal, precision: range::[1, 47], scale: range::[1,37]}}}", - StructType(mapOf( - "a" to StaticType.unionOf( - DecimalType( - DecimalType.PrecisionScaleConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, - IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + StructType( + mapOf( + "a" to StaticType.unionOf( + DecimalType( + DecimalType.PrecisionScaleConstraint.Unconstrained, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, + IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + ) + ) + ) + ), + StaticType.NULL, + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, + IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + ) ) - )) - ), - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, - IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } ) - )) - )) + ) + ) ) ), MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: nullable::{type: int, valid_values: range::[${Int.MIN_VALUE}, ${Int.MAX_VALUE}]} } }", - StructType(mapOf("a" to StaticType.unionOf( - IntType(IntType.IntRangeConstraint.INT4, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } - ) - ))), - StaticType.MISSING, - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } + StructType( + mapOf( + "a" to StaticType.unionOf( + IntType( + IntType.IntRangeConstraint.INT4, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), inclusive(ionInt(2147483647)))))) } + ) + ) + ) + ), + StaticType.MISSING, + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { validValues(rangeOfValidValues(numRange(numberRange(inclusive(ionInt(-2147483648)), 
inclusive(ionInt(2147483647)))))) } + ) + ) + ) ) - )) + ) ) - )) ), // single field, named type nullable MapperE2ETestCase( @@ -1755,145 +2313,225 @@ internal fun structTests() = listOf( // single field, inline type nullable MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: {type: nullable::int} } }", - StructType(mapOf("a" to StaticType.unionOf( - StaticType.INT, - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("int", true)) - )) - ))), + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.INT, + StaticType.NULL, + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("int", true)) + ) + ) + ) + ) + ), "type::{ name: $typeName, type: struct, fields: { a: nullable::int } }" ), // single field, nullable inline single type, required MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: { type: nullable::int, occurs: required } } }", - StructType(mapOf("a" to StaticType.unionOf( - StaticType.INT, - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int", true), - IonSchemaModel.build { occurs(occursRequired()) } - ) - )) - ))), + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.INT, + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int", true), + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) + ) + ), "type::{ name: $typeName, type: struct, fields: { a: nullable::{ type: nullable::int, occurs: required } } }" ), // single field, inline type, optional MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: { type: list, element: int } } }", - StructType(mapOf("a" to StaticType.unionOf( - ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) } - ) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) } - ) - )) - ))) + StructType( + mapOf( + "a" to StaticType.unionOf( + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) } + ) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) } + ) + ) + ) + ) + ) + ) ), // single field, inline type with nullable values, optional MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: { type: list, element: nullable::int } } }", - StructType(mapOf("a" to StaticType.unionOf( - ListType(StaticType.unionOf(StaticType.NULL, StaticType.INT), metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(true))) }) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(true))) }) - )) - ))) + StructType( + mapOf( + "a" to StaticType.unionOf( + ListType( + StaticType.unionOf(StaticType.NULL, StaticType.INT), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, 
buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(true))) }) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(true))) }) + ) + ) + ) + ) + ) ), // single field, inline single type, required MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: { type: int, occurs: required } } }", - StructType(mapOf("a" to IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("int"), IonSchemaModel.build { occurs(occursRequired()) }) - ))))) + StructType( + mapOf( + "a" to IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("int"), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ) ), // single field, inline collection type, required MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: { type: list, element: int, occurs: required } } }", - StructType(mapOf("a" to ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("int", ionBool(false))) }, - IonSchemaModel.build { occurs(occursRequired()) } + StructType( + mapOf( + "a" to ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("int", ionBool(false))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) ) - ))))) + ) ), // single field, multiple types allowed MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a : { any_of: [int, string] } } }", - StructType(mapOf("a" to AnyOfType(setOf( - StaticType.INT, - StaticType.STRING, - StaticType.MISSING), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) } + StructType( + mapOf( + "a" to AnyOfType( + setOf( + StaticType.INT, + StaticType.STRING, + StaticType.MISSING + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) } + ) + ) + ) ) - )) - ))) + ) + ) ), // same as above, with required field MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a : { any_of: [int, string], occurs: required } } }", - StructType(mapOf("a" to AnyOfType(setOf( - StaticType.INT, - StaticType.STRING), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) }, - IonSchemaModel.build { occurs(occursRequired()) } + StructType( + mapOf( + "a" to AnyOfType( + setOf( + StaticType.INT, + StaticType.STRING + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(false))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) ) - )) - ))) + ) + ) ), // same as above, with nullable type MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a : { any_of: [int, nullable::string] } } }", - StructType(mapOf("a" to AnyOfType(setOf( - StaticType.INT, - StaticType.MISSING, - StaticType.unionOf(StaticType.NULL, StaticType.STRING) - ), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - 
IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(true))) } + StructType( + mapOf( + "a" to AnyOfType( + setOf( + StaticType.INT, + StaticType.MISSING, + StaticType.unionOf(StaticType.NULL, StaticType.STRING) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(true))) } + ) + ) + ) ) - )) - ))), + ) + ), "type::{ name: $typeName, type: struct, fields: { a : { any_of: [nullable::int, nullable::string] } } }" ), // same as above, with required field MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a : { any_of: [int, nullable::string], occurs: required } } }", - StructType(mapOf("a" to AnyOfType( - setOf(StaticType.INT, StaticType.unionOf(StaticType.NULL, StaticType.STRING)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(true))) }, - IonSchemaModel.build { occurs(occursRequired()) } - ) - )) - ))), + StructType( + mapOf( + "a" to AnyOfType( + setOf(StaticType.INT, StaticType.unionOf(StaticType.NULL, StaticType.STRING)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { anyOf(namedType("int", ionBool(false)), namedType("string", ionBool(true))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) + ) + ), "type::{ name: $typeName, type: struct, fields: { a : { any_of: [nullable::int, nullable::string], occurs: required } } }" ), // multiple fields @@ -1904,10 +2542,12 @@ internal fun structTests() = listOf( b : nullable::int }} """, - StructType(mapOf( - "a" to StaticType.unionOf(StaticType.INT, StaticType.MISSING), - "b" to StaticType.unionOf(StaticType.INT, StaticType.NULL, StaticType.MISSING) - )) + StructType( + mapOf( + "a" to StaticType.unionOf(StaticType.INT, StaticType.MISSING), + "b" to StaticType.unionOf(StaticType.INT, StaticType.NULL, StaticType.MISSING) + ) + ) ), // union of named and inline types MapperE2ETestCase( @@ -1916,27 +2556,38 @@ internal fun structTests() = listOf( a : { any_of: [int, {type:list, element:string}] } }} """, - StructType(mapOf( - "a" to StaticType.unionOf( - StaticType.INT, - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - IonSchemaModel.build { anyOf( - namedType("int", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }), - ionBool(false) + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.INT, + ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) ) - ) } + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("int", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }), + ionBool(false) + ) + ) + } + ) + ) ) - )) + ) ) - )) + ) ), // inline type, required, with other constraints MapperE2ETestCase( @@ -1945,39 +2596,57 @@ internal fun structTests() = 
listOf( a : { type:list, element: string, container_length:5, occurs:required } }} """, - StructType(mapOf( - "a" to ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("string", ionBool(false))) }, - IonSchemaModel.build { containerLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { occurs(occursRequired()) } - ) - ))) - )) + StructType( + mapOf( + "a" to ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("string", ionBool(false))) }, + IonSchemaModel.build { containerLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) + ) + ) ), // inline type, optional, with other constraints MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: { type: list, container_length: 5 } } }", - StructType(mapOf( - "a" to StaticType.unionOf( - StaticType.MISSING, - ListType(StaticType.ANY, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { containerLength(equalsNumber(ionInt(5))) }) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { containerLength(equalsNumber(ionInt(5))) }) - )) + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.MISSING, + ListType( + StaticType.ANY, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { containerLength(equalsNumber(ionInt(5))) }) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { containerLength(equalsNumber(ionInt(5))) }) + ) + ) + ) ) - )) + ) ), // field is "any" type and should return union of AnyType and MissingType MapperE2ETestCase( "type::{ name: $typeName, type: struct, fields: { a: any } }", - StructType(mapOf( - "a" to StaticType.unionOf(StaticType.ANY, StaticType.MISSING) - )), + StructType( + mapOf( + "a" to StaticType.unionOf(StaticType.ANY, StaticType.MISSING) + ) + ), "type::{ name: $typeName, type: struct, fields: { a: nullable::any } }", ), // struct without fields @@ -2015,9 +2684,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - BoolType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("bool")) - ))) + BoolType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("bool")) + ) + ) + ) ) ), // top-level bag and int type @@ -2027,9 +2700,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")) - ))) + IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")) + ) + ) + ) ) ), // top-level bag and float type @@ -2039,9 +2716,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - FloatType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("float")) - ))) + FloatType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("float")) + ) + ) + ) ) ), // top-level bag and decimal type @@ -2051,9 +2732,13 @@ internal 
fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - DecimalType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("decimal")) - ))) + DecimalType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("decimal")) + ) + ) + ) ) ), // top-level bag and timestamp type @@ -2063,9 +2748,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - TimestampType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("timestamp")) - ))) + TimestampType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("timestamp")) + ) + ) + ) ) ), // top-level bag and symbol type @@ -2075,9 +2764,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - SymbolType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("symbol")) - ))) + SymbolType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("symbol")) + ) + ) + ) ) ), // top-level bag and string type @@ -2087,9 +2780,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - ))) + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ) ) ), // top-level bag and clob type @@ -2099,9 +2796,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - ClobType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("clob")) - ))) + ClobType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("clob")) + ) + ) + ) ) ), // top-level bag and blob type @@ -2111,9 +2812,13 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: $typeName, type: bag, element: bar } """, BagType( - BlobType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("blob")) - ))) + BlobType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("blob")) + ) + ) + ) ) ), // top-level bag and list type @@ -2125,9 +2830,11 @@ internal fun bagWithCustomElementTests() = listOf( BagType( ListType( StaticType.INT, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - )) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) ) ) ), @@ -2140,9 +2847,11 @@ internal fun bagWithCustomElementTests() = listOf( BagType( SexpType( StaticType.INT, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("sexp"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - )) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("sexp"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) ) ) ), @@ -2155,66 +2864,101 @@ internal fun bagWithCustomElementTests() = listOf( BagType( StructType( mapOf("a" to StaticType.unionOf(StaticType.INT, StaticType.MISSING)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - 
buildTypeConstraint("struct"), - IonSchemaModel.build { - fields(field("a", namedType("int", ionBool(false)))) - } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("struct"), + IonSchemaModel.build { + fields(field("a", namedType("int", ionBool(false)))) + } + ) + ) + ) + ) + ) + ), + // top-level bag and struct type with inline field + MapperE2ETestCase( + """ + type::{ name: bar, type: struct, fields: { a: {type: list, element: string} } } + type::{ name: $typeName, type: bag, element: bar } + """, + BagType( + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.MISSING, + ListType( + StaticType.STRING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) + ) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", buildTypeConstraint("struct"), + IonSchemaModel.build { + fields( + field( + "a", + inlineType( + buildTypeDef( + null, + buildTypeConstraint("list"), element(namedType("string", ionBool(false))) + ), + ionBool(false) + ) + ) + ) + } + ) ) - )) + ) ) ) ), - // top-level bag and struct type with inline field + // element as named top-level type MapperE2ETestCase( """ - type::{ name: bar, type: struct, fields: { a: {type: list, element: string} } } + type::{ name: bar, type: string } type::{ name: $typeName, type: bag, element: bar } """, BagType( - StructType( - mapOf("a" to StaticType.unionOf( - StaticType.MISSING, - ListType(StaticType.STRING, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("string", ionBool(false))) }) - )) - )), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("struct"), IonSchemaModel.build { - fields(field("a", inlineType( - buildTypeDef(null, - buildTypeConstraint("list"), element(namedType("string", ionBool(false))) - ), ionBool(false) - ))) - }) - )) + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) ) ) ), - // element as named top-level type - MapperE2ETestCase( - """ - type::{ name: bar, type: string } - type::{ name: $typeName, type: bag, element: bar } - """, - BagType(StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - )))) - ), // element as inline top-level type MapperE2ETestCase( """ type::{ name: bar, type: string } type::{ name: $typeName, type: bag, element: { type: bar } } """, - BagType(StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")), - buildTypeDef(null, buildTypeConstraint("bar"))))) + BagType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")), + buildTypeDef(null, buildTypeConstraint("bar")) + ) + ) + ) ), """ type::{ name: bar, type: string } @@ -2227,15 +2971,23 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: bar, type: string } type::{ name: $typeName, type: bag, element: nullable::bar } """, - BagType(StaticType.unionOf( - StaticType.NULL, - 
StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")) - )) - )) + BagType( + StaticType.unionOf( + StaticType.NULL, + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ) + ) ), // Same as above, with more constraints on the custom type MapperE2ETestCase( @@ -2243,27 +2995,34 @@ internal fun bagWithCustomElementTests() = listOf( type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: bag, element: nullable::bar } """, - BagType(StaticType.unionOf( - StaticType.NULL, - StringType( - StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + BagType( + StaticType.unionOf( + StaticType.NULL, + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) ) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } - ) - )) - )) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) + ) + ) + ) ) ) @@ -2274,13 +3033,15 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { a : bar } } """, - StructType(mapOf( - "a" to StaticType.unionOf( - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int")))) + StructType( + mapOf( + "a" to StaticType.unionOf( + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + StaticType.MISSING, + metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int")))) + ) ) - )) + ) ), // Result of a CAST to a custom type, optional // Custom type is scalar type with additional constraints @@ -2289,23 +3050,45 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, utf8_byte_length: range::[1,2048]} type::{ name: $typeName, type: struct, fields: { a : bar }} """, - StructType(mapOf( - "a" to asOptional(StringType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - )) } - ))))).withMetas(mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - 
numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - ))} - ) - ))) - )) + StructType( + mapOf( + "a" to asOptional( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ) + ) + ) + ) + ).withMetas( + mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ) + ) + ) + ) + ) + ) ), // required field, custom type is scalar type without additional constraints MapperE2ETestCase( @@ -2313,12 +3096,18 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { a : { type: bar, occurs: required } } } """, - StructType(mapOf( - "a" to IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) - ))) - )) + StructType( + mapOf( + "a" to IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ) ), // Result of a CAST to a custom type // Same as above, but custom type is a scalar type with additional constraints @@ -2327,18 +3116,28 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, utf8_byte_length: range::[1,2048]} type::{ name: $typeName, type: struct, fields: { a : { type: bar, occurs: required } } } """, - StructType(mapOf( - "a" to StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - ))} - ), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) - ))) - )) + StructType( + mapOf( + "a" to StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ) ), // Result of a CAST to a custom type, nullable MapperE2ETestCase( @@ -2346,24 +3145,46 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, utf8_byte_length: range::[1,2048]} type::{ name: $typeName, type: struct, fields: { a : {type: nullable::bar, occurs: required} } } """, - StructType(mapOf( - "a" to asNullable(StringType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - )) } - ))))).withMetas(mapOf(ISL_META_KEY to listOf( - buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - ))} - ), - buildTypeDef(null, buildTypeConstraint("bar", true), IonSchemaModel.build { occurs(occursRequired()) }) - ))) - )), + StructType( + mapOf( + "a" to asNullable( + 
StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ) + ) + ) + ) + ).withMetas( + mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("bar", true), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ), """ type::{ name: bar, type: string, utf8_byte_length: range::[1,2048]} type::{ name: $typeName, type: struct, fields: { a : nullable::{type: nullable::bar, occurs: required} } } @@ -2375,21 +3196,47 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, utf8_byte_length: range::[1,2048]} type::{ name: $typeName, type: struct, fields: { a : nullable::bar }} """, - StructType(mapOf( - "a" to asOptional(asNullable(StringType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - )) } - )))))).withMetas(mapOf(ISL_META_KEY to listOf(buildTypeDef( - "bar", - buildTypeConstraint("string"), - IonSchemaModel.build { utf8ByteLength(equalsRange( - numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) - )) } - )))) - )) + StructType( + mapOf( + "a" to asOptional( + asNullable( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ) + ) + ) + ) + ) + ).withMetas( + mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + buildTypeConstraint("string"), + IonSchemaModel.build { + utf8ByteLength( + equalsRange( + numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))) + ) + ) + } + ) + ) + ) + ) + ) + ) ), // custom type is collection type MapperE2ETestCase( @@ -2397,17 +3244,26 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: cat, type: list, element: int } type::{ name: $typeName, type: struct, fields: { b: cat } } """, - StructType(mapOf( - "b" to StaticType.unionOf( - ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - )) + StructType( + mapOf( + "b" to StaticType.unionOf( + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) + ) ) - )) + ) ), // nullable field, custom type is scalar type MapperE2ETestCase( @@ -2415,14 +3271,16 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { c: nullable::bar } } """, - StructType(mapOf( - "c" to 
StaticType.unionOf( - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int")))) + StructType( + mapOf( + "c" to StaticType.unionOf( + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + StaticType.NULL, + StaticType.MISSING, + metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int")))) + ) ) - )) + ) ), // nullable field, custom type is collection type MapperE2ETestCase( @@ -2430,18 +3288,27 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: cat, type: list, element: int } type::{ name: $typeName, type: struct, fields: { d: nullable::cat } } """, - StructType(mapOf( - "d" to StaticType.unionOf( - ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - ))), - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - )) + StructType( + mapOf( + "d" to StaticType.unionOf( + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) + ), + StaticType.NULL, + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) + ) ) - )) + ) ), // required field, custom type is collection type MapperE2ETestCase( @@ -2449,12 +3316,18 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: list, element: int } type::{ name: $typeName, type: struct, fields: { a : { type: bar, occurs: required } } } """, - StructType(mapOf( - "a" to ListType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) - ))) - )) + StructType( + mapOf( + "a" to ListType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ) ), MapperE2ETestCase( """ @@ -2463,32 +3336,42 @@ internal fun structWithCustomFieldTests() = listOf( g: { type: list, element: bar } }} """, - StructType(mapOf( - "g" to StaticType.unionOf( - StaticType.MISSING, - ListType( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")))) + StructType( + mapOf( + "g" to StaticType.unionOf( + StaticType.MISSING, + ListType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")), + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("bar", ionBool(false))) } + ) + ) + ) ), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")), - buildTypeDef( - 
null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("bar", ionBool(false))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")), + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("bar", ionBool(false))) } + ) ) - )) - ), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")), - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("bar", ionBool(false))) } ) - )) + ) ) - )) + ) ), MapperE2ETestCase( """ @@ -2497,22 +3380,30 @@ internal fun structWithCustomFieldTests() = listOf( h: { type: list, element: bar, occurs: required } }} """, - StructType(mapOf( - "h" to ListType( - StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")))) - ), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string")), - buildTypeDef( - null, - buildTypeConstraint("list"), - IonSchemaModel.build { element(namedType("bar", ionBool(false))) }, - IonSchemaModel.build { occurs(occursRequired()) } - )) + StructType( + mapOf( + "h" to ListType( + StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("string")), + buildTypeDef( + null, + buildTypeConstraint("list"), + IonSchemaModel.build { element(namedType("bar", ionBool(false))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) ) ) - )) + ) ), // any_of types with custom types MapperE2ETestCase( @@ -2521,25 +3412,43 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: cat, type: list, element: int } type::{ name: $typeName, type: struct, fields: { h: { any_of: [bar,cat] } } } """, - StructType(mapOf( - "h" to StaticType.unionOf( - IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")))) - ), - ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }))) - ), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }), - buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("bar", ionBool(false)), - namedType("cat", ionBool(false)) - )}) - )) + StructType( + mapOf( + "h" to StaticType.unionOf( + IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")) + ) + ) + ), + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("bar", ionBool(false)), + namedType("cat", ionBool(false)) + ) + } + ) + ) + ) + ) ) - )) + ) ), // Same as above, with required field MapperE2ETestCase( @@ -2548,27 +3457,43 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: cat, type: list, element: 
int } type::{ name: $typeName, type: struct, fields: { g: { any_of:[bar,cat], occurs: required } } } """, - StructType(mapOf( - "g" to StaticType.unionOf( - IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")) - ))), - ListType(StaticType.INT, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }), - buildTypeDef( - null, - IonSchemaModel.build { anyOf( - namedType("bar", ionBool(false)), - namedType("cat", ionBool(false)) - )}, IonSchemaModel.build { occurs(occursRequired()) } + StructType( + mapOf( + "g" to StaticType.unionOf( + IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")) + ) + ) + ), + ListType( + StaticType.INT, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef("cat", buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("int", ionBool(false))) }), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("bar", ionBool(false)), + namedType("cat", ionBool(false)) + ) + }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) ) - )) + ) ) - )) + ) ), // Field with custom type and additional constraints MapperE2ETestCase( @@ -2576,19 +3501,27 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { a : { type: bar, annotations: ['my_int'] } } } """, - StructType(mapOf( - "a" to StaticType.unionOf( - IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { annotations(ionBool(false).toIonElement(), annotationList(annotation("my_int"))) }) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { annotations(ionBool(false).toIonElement(), annotationList(annotation("my_int"))) }) - )) + StructType( + mapOf( + "a" to StaticType.unionOf( + IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { annotations(ionBool(false).toIonElement(), annotationList(annotation("my_int"))) }) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { annotations(ionBool(false).toIonElement(), annotationList(annotation("my_int"))) }) + ) + ) + ) ) - )) + ) ), // Same as above, with required field MapperE2ETestCase( @@ -2596,17 +3529,23 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { a : { type: bar, annotations: ['my_int'], occurs: required } } } """, - StructType(mapOf( - "a" to IntType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), 
- buildTypeDef( - null, - buildTypeConstraint("bar"), - IonSchemaModel.build { annotations(ionBool(false).toIonElement(), annotationList(annotation("my_int"))) }, - IonSchemaModel.build { occurs(occursRequired()) } - ) - ))) - )) + StructType( + mapOf( + "a" to IntType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef( + null, + buildTypeConstraint("bar"), + IonSchemaModel.build { annotations(ionBool(false).toIonElement(), annotationList(annotation("my_int"))) }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) + ) + ) + ) + ) ), // any_of types with mix of core and custom types MapperE2ETestCase( @@ -2614,20 +3553,29 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { a : { any_of: [string, bar] } } } """, - StructType(mapOf( - "a" to StaticType.unionOf( - StaticType.STRING, - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef(null, IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - namedType("bar", ionBool(false)) - )}) - )) + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.STRING, + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + namedType("bar", ionBool(false)) + ) + } + ) + ) + ) + ) ) - )) + ) ), // Same as above, with required field MapperE2ETestCase( @@ -2635,23 +3583,29 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: int } type::{ name: $typeName, type: struct, fields: { a : { any_of: [string, bar], occurs: required } } } """, - StructType(mapOf( - "a" to StaticType.unionOf( - StaticType.STRING, - IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("int")), - buildTypeDef( - null, - IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - namedType("bar", ionBool(false)) - )}, - IonSchemaModel.build { occurs(occursRequired()) } + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.STRING, + IntType(metas = mapOf(ISL_META_KEY to listOf(buildTypeDef("bar", buildTypeConstraint("int"))))), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef("bar", buildTypeConstraint("int")), + buildTypeDef( + null, + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + namedType("bar", ionBool(false)) + ) + }, + IonSchemaModel.build { occurs(occursRequired()) } + ) + ) ) - )) + ) ) - )) + ) ), // nullable, optional field, custom type with constraints MapperE2ETestCase( @@ -2659,25 +3613,35 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: struct, fields: { a: nullable::bar } } """, - StructType(mapOf("a" to StaticType.unionOf( - StringType( - StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string"), - IonSchemaModel.build { 
codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + StructType( + mapOf( + "a" to StaticType.unionOf( + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) + ) + ), + StaticType.NULL, + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) ) - )) - ), - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) ) - )) - ))) + ) ), // required field, custom type with constraints MapperE2ETestCase( @@ -2685,13 +3649,22 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: struct, fields: { a: { type: bar, occurs: required } } } """, - StructType(mapOf("a" to StringType(metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } - ), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) - ))))) + StructType( + mapOf( + "a" to StringType( + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ) ), // nullable, required field, custom type with constraints MapperE2ETestCase( @@ -2699,25 +3672,35 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: struct, fields: { a: { type: nullable::bar, occurs: required } } } """, - StructType(mapOf("a" to StaticType.unionOf( - StringType( - StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + StructType( + mapOf( + "a" to StaticType.unionOf( + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ) + ) + ) + ), + StaticType.NULL, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, + IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } + ), + 
buildTypeDef(null, buildTypeConstraint("bar", true), IonSchemaModel.build { occurs(occursRequired()) }) + ) ) - )) - ), - StaticType.NULL, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) }, - IonSchemaModel.build { utf8ByteLength(equalsNumber(ionInt(5))) } - ), - buildTypeDef(null, buildTypeConstraint("bar", true), IonSchemaModel.build { occurs(occursRequired()) }) - )) - ))), + ) + ) + ), """ type::{ name: bar, type: string, codepoint_length: 5, utf8_byte_length: 5 } type::{ name: $typeName, type: struct, fields: { a: nullable::{ type: nullable::bar, occurs: required } } } @@ -2729,27 +3712,48 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, any_of: [string, {type: list, element: bar}] } type::{ name: $typeName, type: struct, fields: { a: {type: bar, occurs: required} } } """, - StructType(mapOf("a" to StaticType.unionOf( - StaticType.STRING, - ListType(StaticType.ANY, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) - ) - ) }), - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }) - ))), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) - ) - ) }), - buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) - )) - ))), + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.STRING, + ListType( + StaticType.ANY, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }) + ) + ) + ), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("bar"), IonSchemaModel.build { occurs(occursRequired()) }) + ) + ) + ) + ) + ), """ type::{ name: bar, any_of: [string, {type: list, element: bar}] } type::{ name: $typeName, type: struct, fields: { a: {any_of:[string, {type: list, element: bar}], occurs: required} } } @@ -2761,27 +3765,48 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, any_of: [string, {type: list, element: bar}] } type::{ name: $typeName, type: struct, fields: { a: bar } } """, - StructType(mapOf("a" to StaticType.unionOf( - StaticType.STRING, - ListType(StaticType.ANY, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) 
}), ionBool(false) - ) - ) }), - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }) - ))), - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) - ) - ) }) - )) - ))) + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.STRING, + ListType( + StaticType.ANY, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }) + ) + ) + ), + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + ) + ) + } + ) + ) + ) + ) + ) + ) ), // same as above, but nullable too MapperE2ETestCase( @@ -2789,30 +3814,49 @@ internal fun structWithCustomFieldTests() = listOf( type::{ name: bar, any_of: [string, {type: list, element: bar}] } type::{ name: $typeName, type: struct, fields: { a: nullable::bar } } """, - StructType(mapOf("a" to StaticType.unionOf( - StaticType.STRING, - ListType(StaticType.ANY, metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", IonSchemaModel.build { - anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + StructType( + mapOf( + "a" to StaticType.unionOf( + StaticType.STRING, + ListType( + StaticType.ANY, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + ) + ) + } + ), + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }) + ) + ) + ), + StaticType.NULL, + StaticType.MISSING, + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + "bar", + IonSchemaModel.build { + anyOf( + namedType("string", ionBool(false)), + inlineType( + buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) + ) + ) + } + ) ) ) - }), - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }) - ))), - StaticType.NULL, - StaticType.MISSING, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef("bar", IonSchemaModel.build { anyOf( - namedType("string", ionBool(false)), - inlineType( - buildTypeDef(null, buildTypeConstraint("list"), IonSchemaModel.build { element(namedType("bar", ionBool(false))) }), ionBool(false) - ) - ) }) - )) - ))) + ) + ) + ) ) ) @@ -2830,11 +3874,15 @@ internal fun stringTests() = listOf( StaticType.unionOf( StringType( 
StringType.StringLengthConstraint.Constrained(NumberConstraint.Equals(5)), - metas = mapOf(ISL_META_KEY to listOf(buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } - ))) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsNumber(ionInt(5))) } + ) + ) + ) ), StaticType.NULL ) @@ -2846,18 +3894,23 @@ internal fun stringTests() = listOf( // nullable string with constraints MapperE2ETestCase( "type::{ name: $typeName, type: nullable::{type: string, codepoint_length: range::[0,5]} }", - AnyOfType(setOf( - StaticType.NULL, - StringType(StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + AnyOfType( + setOf( + StaticType.NULL, + StringType( + StringType.StringLengthConstraint.Constrained(NumberConstraint.UpTo(5)), + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(0)), inclusive(ionInt(5))))) } + ) + ) ) - )) + ) ) - )) + ) ), MapperE2ETestCase( "type::{ name: $typeName, type: string, codepoint_length: range::[exclusive::-1, 5] }", @@ -2883,13 +3936,15 @@ internal fun stringTests() = listOf( "type::{ name: $typeName, type: string, codepoint_length: range::[1, 2048] }", StringType( StringType.StringLengthConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - typeName, - buildTypeConstraint("string"), - IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + buildTypeConstraint("string"), + IonSchemaModel.build { codepointLength(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(2048))))) } + ) ) - )) + ) ) ) ) @@ -2917,18 +3972,26 @@ internal fun intTests() = listOf( StaticType.unionOf( IntType( IntType.IntRangeConstraint.LONG, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("int"), - IonSchemaModel.build { - validValues(rangeOfValidValues(numRange(numberRange( - inclusive(ionInt(Long.MIN_VALUE)), - inclusive(ionInt(Long.MAX_VALUE)) - )))) - } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("int"), + IonSchemaModel.build { + validValues( + rangeOfValidValues( + numRange( + numberRange( + inclusive(ionInt(Long.MIN_VALUE)), + inclusive(ionInt(Long.MAX_VALUE)) + ) + ) + ) + ) + } + ) ) - )) + ) ), StaticType.NULL ) @@ -2985,14 +4048,16 @@ internal fun decimalTests() = listOf( StaticType.unionOf( DecimalType( DecimalType.PrecisionScaleConstraint.Constrained(10, 5), - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - null, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(10))))) }, - IonSchemaModel.build { scale(equalsNumber(ionInt(5))) } + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + null, + buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(10))))) }, + IonSchemaModel.build { scale(equalsNumber(ionInt(5))) } + ) ) - )) + ) ), StaticType.NULL ) @@ -3001,14 
+4066,16 @@ internal fun decimalTests() = listOf( "type::{ name: $typeName, type: decimal, precision: range::[1,47], scale: range::[1,37] }", DecimalType( DecimalType.PrecisionScaleConstraint.Unconstrained, - metas = mapOf(ISL_META_KEY to listOf( - buildTypeDef( - typeName, - buildTypeConstraint("decimal"), - IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, - IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } - ) - )) + metas = mapOf( + ISL_META_KEY to listOf( + buildTypeDef( + typeName, + buildTypeConstraint("decimal"), + IonSchemaModel.build { precision(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(47))))) }, + IonSchemaModel.build { scale(equalsRange(numberRange(inclusive(ionInt(1)), inclusive(ionInt(37))))) } + ) + ) + ) ) ) ) diff --git a/lang/test/org/partiql/lang/mockdb/MockDb.kt b/lang/test/org/partiql/lang/mockdb/MockDb.kt index acc937e409..9112ffba65 100644 --- a/lang/test/org/partiql/lang/mockdb/MockDb.kt +++ b/lang/test/org/partiql/lang/mockdb/MockDb.kt @@ -31,7 +31,8 @@ class MockDb( * other global variables of our mock database. */ val valueBindings: Bindings = Bindings.ofMap( - globals.mapValues { valueFactory.newFromIonValue(it.value) }) + globals.mapValues { valueFactory.newFromIonValue(it.value) } + ) /** * Provides an implementation of [Bindings] for accessing the data types of tables and other @@ -48,7 +49,8 @@ class MockDb( // "upto" decimal precision ranges val normalizedSchemaModel = NormalizeDecimalPrecisionsToUpToRange().transformSchema(schemaModel) StaticTypeMapper(normalizedSchemaModel).toStaticType(it.key) - }.toMap()) + }.toMap() + ) fun toSession(): EvaluationSession = EvaluationSession.build { globals(valueBindings) } } diff --git a/lang/test/org/partiql/lang/partiqlisl/PartiQLISLSchemaTests.kt b/lang/test/org/partiql/lang/partiqlisl/PartiQLISLSchemaTests.kt index c49dd4bed1..926485fd4b 100644 --- a/lang/test/org/partiql/lang/partiqlisl/PartiQLISLSchemaTests.kt +++ b/lang/test/org/partiql/lang/partiqlisl/PartiQLISLSchemaTests.kt @@ -81,12 +81,16 @@ class PartiQLISLSchemaTests { Assert.assertTrue(bagType.isl.isReadOnly) Assert.assertNull(bagType.isl.container) - val violations = bagType.validate(ION.singleValue(""" + val violations = bagType.validate( + ION.singleValue( + """ $BAG_ANNOTATION::[1, 2, $BAG_ANNOTATION::["a", "a", "b"], 2] - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertTrue(violations.isValid()) Assert.assertFalse(violations.iterator().hasNext()) @@ -183,14 +187,18 @@ class PartiQLISLSchemaTests { Assert.assertNull(dateType.isl.container) // time - 23:59:59.009999(HH:MM:SS.MMMMMM) is represented as ion struct - val violations = dateType.validate(ION.singleValue(""" + val violations = dateType.validate( + ION.singleValue( + """ ${'$'}partiql_time::{ hour: 23, min: 59, sec: 59, sec_fraction: 9999 } - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertTrue(violations.isValid()) } @@ -202,20 +210,27 @@ class PartiQLISLSchemaTests { Assert.assertNull(dateType.isl.container) // time - 24:59:1(HH:MM:SS.MMMMMM) - val violations = dateType.validate(ION.singleValue(""" + val violations = dateType.validate( + ION.singleValue( + """ { hour: 23, min: 59, sec: 1 } - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertFalse(violations.isValid()) Assert.assertTrue(violations.violations.size > 0) - 
Assert.assertEquals(""" + Assert.assertEquals( + """ Validation failed: - missing annotation(s): ${'$'}partiql_time - """.trimIndent(), violations.toString().trimIndent()) + """.trimIndent(), + violations.toString().trimIndent() + ) } @Test @@ -225,13 +240,17 @@ class PartiQLISLSchemaTests { Assert.assertNull(dateType.isl.container) // time - 23:59:59(HH:MM:SS.MMMMMM) is represented as ion struct - val violations = dateType.validate(ION.singleValue(""" + val violations = dateType.validate( + ION.singleValue( + """ ${'$'}partiql_time::{ hour: 23, min: 59, sec: 59 } - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertTrue(violations.isValid()) } @@ -243,21 +262,28 @@ class PartiQLISLSchemaTests { Assert.assertNull(dateType.isl.container) // time - 23:59(HH:MM:SS.MMMMMM) is invalid as sec field is missing - val violations = dateType.validate(ION.singleValue(""" + val violations = dateType.validate( + ION.singleValue( + """ ${'$'}partiql_time::{ hour: 23, min: 59 } - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertFalse(violations.isValid()) Assert.assertTrue(violations.violations.size > 0) - Assert.assertEquals(""" + Assert.assertEquals( + """ Validation failed: - one or more fields don't match expectations - sec - expected range::[1,1] occurrences, found 0 - """.trimIndent(), violations.toString().trimIndent()) + """.trimIndent(), + violations.toString().trimIndent() + ) } @Test @@ -267,22 +293,29 @@ class PartiQLISLSchemaTests { Assert.assertNull(dateType.isl.container) // time - 24:59:1(HH:MM:SS.MMMMMM) - val violations = dateType.validate(ION.singleValue(""" + val violations = dateType.validate( + ION.singleValue( + """ ${'$'}partiql_time::{ hour: 24, min: 59, sec: 1 } - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertFalse(violations.isValid()) Assert.assertTrue(violations.violations.size > 0) - Assert.assertEquals(""" + Assert.assertEquals( + """ Validation failed: - one or more fields don't match expectations - hour: 24 - invalid value 24 - """.trimIndent(), violations.toString().trimIndent()) + """.trimIndent(), + violations.toString().trimIndent() + ) } @Test @@ -291,7 +324,9 @@ class PartiQLISLSchemaTests { Assert.assertTrue(dateType.isl.isReadOnly) Assert.assertNull(dateType.isl.container) - val violations = dateType.validate(ION.singleValue(""" + val violations = dateType.validate( + ION.singleValue( + """ ${'$'}partiql_time::{ hour: 23, min: 59, @@ -299,15 +334,19 @@ class PartiQLISLSchemaTests { sec_fraction: 999999, foo: 12 } - """.trimIndent())) + """.trimIndent() + ) + ) Assert.assertNotNull(violations) Assert.assertFalse(violations.isValid()) Assert.assertTrue(violations.violations.size > 0) - Assert.assertEquals(""" + Assert.assertEquals( + """ Validation failed: - found one or more unexpected fields - foo: 12 - """.trimIndent(), violations.toString().trimIndent()) + """.trimIndent(), + violations.toString().trimIndent() + ) } - -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleTests.kt b/lang/test/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleTests.kt index 75f304001b..b0f6d8a9d5 100644 --- a/lang/test/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleTests.kt +++ b/lang/test/org/partiql/lang/schemadiscovery/SchemaInferencerFromExampleTests.kt @@ -22,17 +22,17 @@ private val islHeader = """ { id: "$schemaId" }, ], } - """.trimIndent() +""".trimIndent() 
private val islFooter = """ schema_footer::{ } - """.trimIndent() +""".trimIndent() private val ion = IonSystemBuilder.standard().build() private val resourceAuthority = ResourceAuthority("org/partiql/schemas", ClassLoader.getSystemClassLoader(), ion) private val iss = IonSchemaSystemBuilder.standard().addAuthority(resourceAuthority).build() -private val inferencer : SchemaInferencerFromExample = SchemaInferencerFromExampleImpl(typeName, iss, listOf(schemaId)) +private val inferencer: SchemaInferencerFromExample = SchemaInferencerFromExampleImpl(typeName, iss, listOf(schemaId)) private val NEG_BIG_INT = BigInteger.valueOf(MIN_INT8).minus(BigInteger.ONE) private val POS_BIG_INT = BigInteger.valueOf(MAX_INT8).plus(BigInteger.ONE) @@ -44,7 +44,8 @@ private const val INT8_VALID_VALUES = "valid_values: range::[$MIN_INT8, $MAX_INT data class ExampleInferenceTestCase( val examples: String, val islAsString: String, - val maxExampleCount: Int = Int.MAX_VALUE) { + val maxExampleCount: Int = Int.MAX_VALUE +) { override fun toString(): String { return examples.trimIndent() + " -> " + islAsString.trimIndent() @@ -56,7 +57,8 @@ data class InferenceAndDefiniteUnifyTestCase( val examples: String, val definiteIslAsString: String, val islAsString: String, - val maxExampleCount: Int = Int.MAX_VALUE) { + val maxExampleCount: Int = Int.MAX_VALUE +) { override fun toString(): String = name } @@ -82,10 +84,13 @@ private fun assertCorrectISL( val generatedISL = generatedIonSchemaModel.toIsl() if (expectedISL != generatedISL) { - throw AssertionError(""" + throw AssertionError( + """ Expected ISL and discovered ISL differ, Expected: $expectedISL - Actual: $generatedISL""".trimIndent()) + Actual: $generatedISL + """.trimIndent() + ) } } @@ -106,7 +111,7 @@ class SchemaInferencerFromExampleTests : TestBase() { """ override fun getParameters(): List = listOf( // empty sequence - ExampleInferenceTestCase("","type::{ name: $typeName }"), + ExampleInferenceTestCase("", "type::{ name: $typeName }"), // boolean ExampleInferenceTestCase("false", "type::{ name: $typeName, type: bool }"), // int @@ -1065,7 +1070,7 @@ class SchemaInferencerFromExampleTests : TestBase() { return ExampleInferenceTestCase( examples = examplesCombined, islAsString = - """ + """ type::{ name: $typeName, type: struct, content: closed, fields: { @@ -1081,160 +1086,198 @@ class SchemaInferencerFromExampleTests : TestBase() { // single example: int (0) createMultiExampleConstraintTestCase( examples = listOf("0"), - constraints = listOf(INT2_VALID_VALUES)), + constraints = listOf(INT2_VALID_VALUES) + ), // single example: int2 createMultiExampleConstraintTestCase( examples = listOf("12345"), - constraints = listOf(INT2_VALID_VALUES)), + constraints = listOf(INT2_VALID_VALUES) + ), // single example: int2 min createMultiExampleConstraintTestCase( examples = listOf("$MIN_INT2"), - constraints = listOf(INT2_VALID_VALUES)), + constraints = listOf(INT2_VALID_VALUES) + ), // single example: int2 max createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT2"), - constraints = listOf(INT2_VALID_VALUES)), + constraints = listOf(INT2_VALID_VALUES) + ), // single example: int4 (minInt2 - 1) createMultiExampleConstraintTestCase( examples = listOf("${MIN_INT2 - 1}"), - constraints = listOf(INT4_VALID_VALUES)), + constraints = listOf(INT4_VALID_VALUES) + ), // single example: int4 (maxInt2 + 1) createMultiExampleConstraintTestCase( examples = listOf("${MAX_INT2 + 1}"), - constraints = listOf(INT4_VALID_VALUES)), + constraints = listOf(INT4_VALID_VALUES) + 
), // single example: int4 min createMultiExampleConstraintTestCase( examples = listOf("$MIN_INT4"), - constraints = listOf(INT4_VALID_VALUES)), + constraints = listOf(INT4_VALID_VALUES) + ), // single example: int4 max createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT4"), - constraints = listOf(INT4_VALID_VALUES)), + constraints = listOf(INT4_VALID_VALUES) + ), // single example: int8 (minInt4 - 1) createMultiExampleConstraintTestCase( examples = listOf("${MIN_INT4 - 1}"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // single example: int8 (maxInt4 + 1) createMultiExampleConstraintTestCase( examples = listOf("${MAX_INT4 + 1}"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // single example: int8 min createMultiExampleConstraintTestCase( examples = listOf("$MIN_INT8"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // single example: int8 max createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT8"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // single example: unconstrained negative int createMultiExampleConstraintTestCase( examples = listOf("$NEG_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // single example: unconstrained positive int createMultiExampleConstraintTestCase( examples = listOf("$POS_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // single example: decimal // single example: decimal zero createMultiExampleConstraintTestCase( examples = listOf("0d0"), - constraints = listOf("scale: 0", "precision: 1")), + constraints = listOf("scale: 0", "precision: 1") + ), // single example: decimal negative zero createMultiExampleConstraintTestCase( examples = listOf("-0d0"), - constraints = listOf("scale: 0", "precision: 1")), + constraints = listOf("scale: 0", "precision: 1") + ), // single example: decimal w/ precision != 1, scale = 0 createMultiExampleConstraintTestCase( examples = listOf("12345d0"), - constraints = listOf("scale: 0", "precision: 5")), + constraints = listOf("scale: 0", "precision: 5") + ), // single example: decimal w/ precision = 1, scale != 0 createMultiExampleConstraintTestCase( examples = listOf("1d-5"), - constraints = listOf("scale: 5", "precision: 1")), + constraints = listOf("scale: 5", "precision: 1") + ), // single example: decimal w/ precision != 1, scale != 0 createMultiExampleConstraintTestCase( examples = listOf("12345.123"), - constraints = listOf("scale: 3", "precision: 8")), + constraints = listOf("scale: 3", "precision: 8") + ), // single example: string // single example: empty string createMultiExampleConstraintTestCase( examples = listOf("\"\""), - constraints = listOf("codepoint_length: 0")), + constraints = listOf("codepoint_length: 0") + ), // single example: non-empty string createMultiExampleConstraintTestCase( examples = listOf("\"abc\""), - constraints = listOf("codepoint_length: 3")), + constraints = listOf("codepoint_length: 3") + ), // multiple examples: int // int2 with int2 -> int2 createMultiExampleConstraintTestCase( examples = listOf("12345", "-12345"), - constraints = listOf(INT2_VALID_VALUES)), + constraints = listOf(INT2_VALID_VALUES) + ), // int2 with int4 -> int4 createMultiExampleConstraintTestCase( examples = listOf("12345", "$MAX_INT4"), - constraints = listOf(INT4_VALID_VALUES)), + constraints = listOf(INT4_VALID_VALUES) + ), // int2 with int8 -> int8 
createMultiExampleConstraintTestCase( examples = listOf("12345", "$MAX_INT8"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // int4 with int8 -> int8 createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT4", "$MIN_INT8"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // int2 and int4 with int8 -> int8 createMultiExampleConstraintTestCase( examples = listOf("12345", "$MAX_INT4", "$MAX_INT8"), - constraints = listOf(INT8_VALID_VALUES)), + constraints = listOf(INT8_VALID_VALUES) + ), // int2 with unconstrained int createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT2", "$POS_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // int4 with unconstrained int createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT4", "$POS_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // int8 with unconstrained int createMultiExampleConstraintTestCase( examples = listOf("$MAX_INT8", "$POS_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // unconstrained int with unconstrained int createMultiExampleConstraintTestCase( examples = listOf("$NEG_BIG_INT", "$POS_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // int2, int4, int8 with unconstrained int createMultiExampleConstraintTestCase( examples = listOf("$MIN_INT2", "$MAX_INT4", "$MIN_INT8", "$POS_BIG_INT"), - constraints = emptyList()), + constraints = emptyList() + ), // multiple examples: decimal // decimals of the same scale and precision createMultiExampleConstraintTestCase( examples = listOf("12345.123", "54321.321"), - constraints = listOf("scale: 3", "precision: 8")), + constraints = listOf("scale: 3", "precision: 8") + ), // decimals of the same scale and different precision createMultiExampleConstraintTestCase( examples = listOf("12345.123", "1234.123"), - constraints = listOf("scale: 3", "precision: range::[7, 8]")), + constraints = listOf("scale: 3", "precision: range::[7, 8]") + ), // decimals of the different scale and same precision createMultiExampleConstraintTestCase( examples = listOf("12345.123", "123456.12"), - constraints = listOf("scale: range::[2, 3]", "precision: 8")), + constraints = listOf("scale: range::[2, 3]", "precision: 8") + ), // decimals of the different scale and precision createMultiExampleConstraintTestCase( examples = listOf("12345.123", "123456.1234"), - constraints = listOf("scale: range::[3, 4]", "precision: range::[8, 10]")), + constraints = listOf("scale: range::[3, 4]", "precision: range::[8, 10]") + ), // multiple decimals of the different scale and precision createMultiExampleConstraintTestCase( examples = listOf("1.1", "123.123", "12345.12345"), - constraints = listOf("scale: range::[1, 5]", "precision: range::[2, 10]")), + constraints = listOf("scale: range::[1, 5]", "precision: range::[2, 10]") + ), // multiple examples: string // string with string of same lengths createMultiExampleConstraintTestCase( examples = listOf("\"123\"", "\"456\""), - constraints = listOf("codepoint_length: 3")), + constraints = listOf("codepoint_length: 3") + ), // string with string of differing lengths createMultiExampleConstraintTestCase( examples = listOf("\"abc\"", "\"abcdefgh\""), - constraints = listOf("codepoint_length: range::[3, 8]")), + constraints = listOf("codepoint_length: range::[3, 8]") + ), // collections + structs // list of int2s ExampleInferenceTestCase( @@ -1435,7 +1478,7 @@ class 
SchemaInferencerFromExampleTests : TestBase() { """ ${'$'}partiql_bag::[ { a: 1, b: [1, 2, 3], c: { x: 1, y: 2 } }, { a: 10, b: [10, 20, 30], c: { x: 10, y: $POS_BIG_INT } }, - { a: ${MAX_INT4}, b: [100, 200, 300], c: { x: 100, y: 200 } } ] + { a: $MAX_INT4, b: [100, 200, 300], c: { x: 100, y: 200 } } ] """, """ type::{ name: $typeName, type: bag, @@ -1472,21 +1515,23 @@ class SchemaInferencerFromExampleTests : TestBase() { class NullTypeTests : ArgumentsProviderBase() { // All typed nulls will collapse down to untyped null private fun createTypedNullTests(): List { - val coreTypedNulls = listOf("null.null", "null.int", "null.float", "null.decimal", "null.string", - "null.symbol", "null.timestamp", "null.blob", "null.clob", "null.list", "null.sexp", "null.struct") + val coreTypedNulls = listOf( + "null.null", "null.int", "null.float", "null.decimal", "null.string", + "null.symbol", "null.timestamp", "null.blob", "null.clob", "null.list", "null.sexp", "null.struct" + ) return coreTypedNulls.map { typedNull -> ExampleInferenceTestCase(typedNull, "type::{ name: $typeName, type: nullable::\$null }") } } - override fun getParameters(): List = createTypedNullTests() + listOf( // null ExampleInferenceTestCase("null", "type::{ name: $typeName, type: nullable::\$null }"), // list of null.int ExampleInferenceTestCase("[null.int]", "type::{ name: $typeName, type: list, element: { type: nullable::\$null } }"), // struct with null.int field value - ExampleInferenceTestCase("{ foo: null.int }", + ExampleInferenceTestCase( + "{ foo: null.int }", """ type::{ name: $typeName, type: struct, content:closed, @@ -1497,7 +1542,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // list of struct with null.int field value - ExampleInferenceTestCase("[ { foo: null.int } ]", + ExampleInferenceTestCase( + "[ { foo: null.int } ]", """ type::{ name: $typeName, type: list, element: { @@ -1511,7 +1557,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // struct of list with null.int field value - ExampleInferenceTestCase("{ foo: [ null.int ] }", + ExampleInferenceTestCase( + "{ foo: [ null.int ] }", """ type::{ name: $typeName, type: struct, content:closed, @@ -1523,7 +1570,8 @@ class SchemaInferencerFromExampleTests : TestBase() { ), // unification of null types tests // list of int and null - ExampleInferenceTestCase("[1, null]", + ExampleInferenceTestCase( + "[1, null]", """ type::{ name: $typeName, type: list, element: { type: nullable::int, $INT2_VALID_VALUES } @@ -1531,7 +1579,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // list of int and null.int - ExampleInferenceTestCase("[1, null.int]", + ExampleInferenceTestCase( + "[1, null.int]", """ type::{ name: $typeName, type: list, element: { type: nullable::int, $INT2_VALID_VALUES } @@ -1539,7 +1588,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // list of int, null, and null.int - ExampleInferenceTestCase("[1, null, null.int]", + ExampleInferenceTestCase( + "[1, null, null.int]", """ type::{ name: $typeName, type: list, element: { type: nullable::int, $INT2_VALID_VALUES } @@ -1560,7 +1610,8 @@ class SchemaInferencerFromExampleTests : TestBase() { ExampleInferenceTestCase("{} null null.struct", "type::{ name: $typeName, type: nullable::struct, content:closed }"), // lists with conflicting types // list of int, decimal, and null - ExampleInferenceTestCase("[null, 1, 1d0]", + ExampleInferenceTestCase( + "[null, 1, 1d0]", """ type::{ name: $typeName, type: list, element: { @@ -1573,7 +1624,8 @@ class 
SchemaInferencerFromExampleTests : TestBase() { """ ), // list of int, decimal, and null (separate lists) - ExampleInferenceTestCase("[1, 1d0] [null]", + ExampleInferenceTestCase( + "[1, 1d0] [null]", """ type::{ name: $typeName, type: list, element: { @@ -1586,7 +1638,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // list of int and decimal with int and null (separate lists) - ExampleInferenceTestCase("[1, 1d0] [1, null]", + ExampleInferenceTestCase( + "[1, 1d0] [1, null]", """ type::{ name: $typeName, type: list, element: { @@ -1599,7 +1652,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // structs with nullable type - ExampleInferenceTestCase("{ one: 1 } { one: null }", + ExampleInferenceTestCase( + "{ one: 1 } { one: null }", """ type::{ name: $typeName, type: struct, content: closed, @@ -1610,7 +1664,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // structs with nullable type and conflict - ExampleInferenceTestCase("{ one: 1 } { one: 1d0 } { one: null }", + ExampleInferenceTestCase( + "{ one: 1 } { one: 1d0 } { one: null }", """ type::{ name: $typeName, type: struct, content: closed, @@ -1626,7 +1681,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // structs with nullable container type - ExampleInferenceTestCase("{ one: [1] } { one: null }", + ExampleInferenceTestCase( + "{ one: [1] } { one: null }", """ type::{ name: $typeName, type: struct, content: closed, @@ -1640,7 +1696,8 @@ class SchemaInferencerFromExampleTests : TestBase() { """ ), // structs with nullable container type and nullable element - ExampleInferenceTestCase("{ one: [1] } { one: null } { one: [null] }", + ExampleInferenceTestCase( + "{ one: [1] } { one: null } { one: [null] }", """ type::{ name: $typeName, type: struct, content: closed, @@ -1783,13 +1840,13 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "decimal unified with definite schema with non-discovered constraint (valid_values)", examples = "1d0", definiteIslAsString = - """ + """ type::{ name: $typeName, type: decimal, $decimalValidValuesRange } """, islAsString = - """ + """ type::{ name: $typeName, type: decimal, scale: 0, precision: 1, @@ -1802,14 +1859,14 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "decimal unified with definite schema with discovered (precision) and non-discovered constraint (valid_values)", examples = "1d0", definiteIslAsString = - """ + """ type::{ name: $typeName, type: decimal, precision: range::[1, 38], $decimalValidValuesRange } """, islAsString = - """ + """ type::{ name: $typeName, type: decimal, scale: 0, precision: 1, @@ -1822,13 +1879,13 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "string unified with definite schema with non-discovered constraint (utf8_byte_length)", examples = "\"abc\"", definiteIslAsString = - """ + """ type::{ name: $typeName, type: string, $stringUTF8ByteLengthRange } """, islAsString = - """ + """ type::{ name: $typeName, type: string, codepoint_length: 3, $stringUTF8ByteLengthRange @@ -1840,13 +1897,13 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "blob unified with definite schema with non-discovered constraint (byte_length)", examples = "{{ +AB/ }}", definiteIslAsString = - """ + """ type::{ name: $typeName, type: blob, $blobByteLengthRange } """, islAsString = - """ + """ type::{ name: $typeName, type: blob, $blobByteLengthRange } @@ -1856,13 +1913,13 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "union(int, decimal) with definite 
schema of decimal with non-discovered constraint (valid_values)", examples = "1 1d0", definiteIslAsString = - """ + """ type::{ name: $typeName, type: decimal, $decimalValidValuesRange } """, islAsString = - """ + """ type::{ name: $typeName, any_of:[ { type: int, $INT2_VALID_VALUES }, { type: decimal, scale: 0, precision: 1, $decimalValidValuesRange } @@ -1873,14 +1930,14 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "decimal with definite schema of union(blob, decimal) with non-discovered constraints", examples = "1d0", definiteIslAsString = - """ + """ type::{ name: $typeName, any_of:[ { type: blob, $blobByteLengthRange }, { type: decimal, $decimalValidValuesRange } ]} """, islAsString = - """ + """ type::{ name: $typeName, any_of:[ { type: blob, $blobByteLengthRange }, { type: decimal, scale: 0, precision: 1, $decimalValidValuesRange } @@ -1891,7 +1948,7 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "empty struct with definite schema struct", examples = "{ }", definiteIslAsString = - """ + """ type::{ name: $typeName, type: struct, fields: { a: { type: decimal, $decimalValidValuesRange }, @@ -1899,7 +1956,7 @@ class SchemaInferencerFromExampleTests : TestBase() { } """, islAsString = - """ + """ type::{ name: $typeName, type: struct, content: closed, fields: { a: { type: decimal, $decimalValidValuesRange }, @@ -1911,7 +1968,7 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "struct with definite schema struct with additional field", examples = "{ a: 1d0 }", definiteIslAsString = - """ + """ type::{ name: $typeName, type: struct, fields: { a: { type: decimal, $decimalValidValuesRange }, @@ -1920,7 +1977,7 @@ class SchemaInferencerFromExampleTests : TestBase() { } """, islAsString = - """ + """ type::{ name: $typeName, type: struct, content: closed, fields: { a: { type: decimal, scale: 0, precision: 1, $decimalValidValuesRange }, @@ -1933,7 +1990,7 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "struct with additional fields with definite schema struct", examples = "{ a: 1d0, b: {{ +AB/ }}, c: \"abc\" }", definiteIslAsString = - """ + """ type::{ name: $typeName, type: struct, fields: { a: { type: decimal, $decimalValidValuesRange }, @@ -1941,7 +1998,7 @@ class SchemaInferencerFromExampleTests : TestBase() { } """, islAsString = - """ + """ type::{ name: $typeName, type: struct, content: closed, fields: { a: { type: decimal, scale: 0, precision: 1, $decimalValidValuesRange }, @@ -1955,7 +2012,7 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "bag of struct unified with additional decimal constraint", examples = "\$partiql_bag::[ { a: 1d0 } ]", definiteIslAsString = - """ + """ type::{ name: $typeName, type: bag, element: { type: struct, @@ -1966,7 +2023,7 @@ class SchemaInferencerFromExampleTests : TestBase() { } """, islAsString = - """ + """ type::{ name: $typeName, type: bag, element: { type: struct, content: closed, @@ -1981,7 +2038,7 @@ class SchemaInferencerFromExampleTests : TestBase() { name = "bag of struct unified with additional decimal constraint", examples = "\$partiql_bag::[ { a: 1d0 } ]", definiteIslAsString = - """ + """ type::{ name: $typeName, type: bag, element: { type: struct, @@ -1992,7 +2049,7 @@ class SchemaInferencerFromExampleTests : TestBase() { } """, islAsString = - """ + """ type::{ name: $typeName, type: bag, element: { type: struct, content: closed, @@ -2006,13 +2063,13 @@ class SchemaInferencerFromExampleTests : TestBase() { InferenceAndDefiniteUnifyTestCase( name = "bag of 
structs unified with additional constraints", examples = - """ + """ ${'$'}partiql_bag::[ { a: 1, b: ["a", "b", "c"], c: { x: 1., y: {{ +AA/ }} } }, { a: 10, b: ["aa", "bb", "cc"], c: { x: 10., y: {{ +BB/ }} } }, { a: 100, b: ["aaa", "bbb", "ccc"], c: { x: 100., y: {{ +CC/ }} } } ] """, definiteIslAsString = - """ + """ type::{ name: $typeName, type: bag, element: { type: struct, @@ -2029,7 +2086,7 @@ class SchemaInferencerFromExampleTests : TestBase() { } """, islAsString = - """ + """ type::{ name: $typeName, type: bag, element: { type: struct, content: closed, diff --git a/lang/test/org/partiql/lang/syntax/SqlLexerTest.kt b/lang/test/org/partiql/lang/syntax/SqlLexerTest.kt index c57a36a119..ef25fb4eaf 100644 --- a/lang/test/org/partiql/lang/syntax/SqlLexerTest.kt +++ b/lang/test/org/partiql/lang/syntax/SqlLexerTest.kt @@ -176,7 +176,6 @@ class SqlLexerTest : TestBase() { token(TokenType.ION_LITERAL, "{{ \"not a comment //\" }}", 2, 27, 26) ) - @Test fun quotedStrings() = assertTokens( "'1e0' '{''a'':5}'", @@ -518,4 +517,4 @@ class SqlLexerTest : TestBase() { "RS_boolean", token(TokenType.IDENTIFIER, "RS_boolean", 1, 1, 10) ) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/syntax/SqlParserCastTests.kt b/lang/test/org/partiql/lang/syntax/SqlParserCastTests.kt index 59e210af49..c98a87ddac 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserCastTests.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserCastTests.kt @@ -12,7 +12,6 @@ import org.partiql.lang.ION import org.partiql.lang.domains.PartiqlAst import org.partiql.lang.util.ArgumentsProviderBase - class SqlParserCastTests : SqlParserTestBase() { companion object { @@ -25,15 +24,18 @@ class SqlParserCastTests : SqlParserTestBase() { fun toCastTest() = ConfiguredCastParseTest( source, - PartiqlAst.build { query( cast(ast.value, ast.asType, ast.metas)) }) + PartiqlAst.build { query(cast(ast.value, ast.asType, ast.metas)) } + ) fun toCanCastTest() = ConfiguredCastParseTest( source.replaceFirst("CAST", "CAN_CAST"), - PartiqlAst.build { query( canCast(ast.value, ast.asType, ast.metas)) }) + PartiqlAst.build { query(canCast(ast.value, ast.asType, ast.metas)) } + ) fun toCanLosslessCastTest() = ConfiguredCastParseTest( source.replaceFirst("CAST", "CAN_LOSSLESS_CAST"), - PartiqlAst.build { query( canLosslessCast(ast.value, ast.asType, ast.metas) ) }) + PartiqlAst.build { query(canLosslessCast(ast.value, ast.asType, ast.metas)) } + ) } data class ConfiguredCastParseTest(val source: String, val expectedAst: PartiqlAst.PartiqlAstNode) { fun assertCase() { @@ -170,4 +172,4 @@ class SqlParserCastTests : SqlParserTestBase() { @ParameterizedTest @ArgumentsSource(SqlConfiguredCastArguments::class) fun configuredCast(configuredCastCase: ConfiguredCastParseTest) = configuredCastCase.assertCase() -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/syntax/SqlParserCorrelatedJoinTests.kt b/lang/test/org/partiql/lang/syntax/SqlParserCorrelatedJoinTests.kt index c2db8b7da3..fd7eddf1b7 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserCorrelatedJoinTests.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserCorrelatedJoinTests.kt @@ -13,16 +13,19 @@ class SqlParserCorrelatedJoinTests : SqlParserTestBase() { joinPredicate: PartiqlAst.Expr?, wherePredicate: PartiqlAst.Expr? 
= null ): PartiqlAst.Expr = - select( - project = projectList( - projectExpr(id("a")), - projectExpr(id("b"))), - from = join( - joinType, - scan(id("stuff"), "s"), - scan(id("s", caseInsensitive(), localsFirst())), - joinPredicate), - where = wherePredicate) + select( + project = projectList( + projectExpr(id("a")), + projectExpr(id("b")) + ), + from = join( + joinType, + scan(id("stuff"), "s"), + scan(id("s", caseInsensitive(), localsFirst())), + joinPredicate + ), + where = wherePredicate + ) @Test fun selectCorrelatedExplicitCrossJoin() = assertExpression( @@ -37,7 +40,8 @@ class SqlParserCorrelatedJoinTests : SqlParserTestBase() { selectWithCorrelatedJoin( joinType = PartiqlAst.JoinType.Inner(), joinPredicate = null, - wherePredicate = callFWithS()) + wherePredicate = callFWithS() + ) } @Test @@ -53,7 +57,8 @@ class SqlParserCorrelatedJoinTests : SqlParserTestBase() { selectWithCorrelatedJoin( joinType = PartiqlAst.JoinType.Left(), joinPredicate = null, - wherePredicate = callFWithS()) + wherePredicate = callFWithS() + ) } @Test @@ -73,10 +78,10 @@ class SqlParserCorrelatedJoinTests : SqlParserTestBase() { ) { selectWithCorrelatedJoin( joinType = PartiqlAst.JoinType.Left(), - joinPredicate = callFWithS()) + joinPredicate = callFWithS() + ) } - @Test fun selectCorrelatedJoin() = assertExpression( "SELECT a, b FROM stuff s, @s WHERE f(s)", @@ -90,7 +95,8 @@ class SqlParserCorrelatedJoinTests : SqlParserTestBase() { selectWithCorrelatedJoin( joinType = PartiqlAst.JoinType.Inner(), joinPredicate = null, - wherePredicate = callFWithS()) + wherePredicate = callFWithS() + ) } @Test @@ -106,6 +112,7 @@ class SqlParserCorrelatedJoinTests : SqlParserTestBase() { selectWithCorrelatedJoin( joinType = PartiqlAst.JoinType.Inner(), joinPredicate = null, - wherePredicate = callFWithS()) + wherePredicate = callFWithS() + ) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/syntax/SqlParserCustomTypeCatalogTests.kt b/lang/test/org/partiql/lang/syntax/SqlParserCustomTypeCatalogTests.kt index 3035143b93..3fbc4fb0c7 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserCustomTypeCatalogTests.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserCustomTypeCatalogTests.kt @@ -16,7 +16,7 @@ import org.partiql.lang.util.asIonSexp * Parser tests covering the (de)serialization of the old ASTs to custom types * TODO: Remove these tests https://github.com/partiql/partiql-lang-kotlin/issues/510 */ -class SqlParserCustomTypeCatalogTests: SqlParserTestBase() { +class SqlParserCustomTypeCatalogTests : SqlParserTestBase() { private val customTypeVisitorTransform = CustomTypeVisitorTransform() @@ -31,7 +31,7 @@ class SqlParserCustomTypeCatalogTests: SqlParserTestBase() { val newSerializedPigAst: String ) - private fun deserialize(serializedSexp: String) : ExprNode { + private fun deserialize(serializedSexp: String): ExprNode { val sexp = ion.singleValue(serializedSexp).asIonSexp() val astExpr = PartiqlAst.transform(sexp.toIonElement()) as PartiqlAst.Expr val astStatement = PartiqlAst.build { @@ -102,7 +102,7 @@ class SqlParserCustomTypeCatalogTests: SqlParserTestBase() { (project_expr (cast (id colour (case_insensitive) (unqualified)) (es_text)) colour) (project_expr (cast (id age (case_insensitive) (unqualified)) (es_integer)) years))) (from (scan (id SOURCE_VIEW_DELTA_FULL_TRANSACTIONS (case_insensitive) (unqualified)) null null null))) - """.trimIndent(), + """.trimIndent(), newSerializedPigAst = """ (select (project (project_list @@ -110,8 +110,8 @@ class SqlParserCustomTypeCatalogTests: 
SqlParserTestBase() { (project_expr (cast (id colour (case_insensitive) (unqualified)) (custom_type es_text)) colour) (project_expr (cast (id age (case_insensitive) (unqualified)) (custom_type es_integer)) years))) (from (scan (id SOURCE_VIEW_DELTA_FULL_TRANSACTIONS (case_insensitive) (unqualified)) null null null))) - """.trimIndent() + """.trimIndent() ) ) + castToCustomTypeTests() } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/syntax/SqlParserDateTimeTests.kt b/lang/test/org/partiql/lang/syntax/SqlParserDateTimeTests.kt index ac709a8470..92c35ce598 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserDateTimeTests.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserDateTimeTests.kt @@ -191,20 +191,20 @@ class SqlParserDateTimeTests : SqlParserTestBase() { } ) - private fun generateRandomSeed() : Random { + private fun generateRandomSeed(): Random { val rng = Random() val seed = rng.nextLong() - println("Randomly generated seed is ${seed}. Use this to reproduce failures in dev environment.") + println("Randomly generated seed is $seed. Use this to reproduce failures in dev environment.") rng.setSeed(seed) return rng } - private fun Random.nextDate() : Date { + private fun Random.nextDate(): Date { val year = nextInt(10000) val month = nextInt(12) + 1 val day = when (month) { in monthsWith31Days -> nextInt(31) - 2 -> when ((year % 4 == 0 && year % 100 != 0) || (year % 400 == 0)) { + 2 -> when ((year % 4 == 0 && year % 100 != 0) || (year % 400 == 0)) { true -> nextInt(29) false -> nextInt(28) } @@ -224,7 +224,7 @@ class SqlParserDateTimeTests : SqlParserTestBase() { } } } - + private fun createErrorCaseForTime(source: String, errorCode: ErrorCode, line: Long, col: Long, tokenType: TokenType, tokenValue: IonValue, skipTest: Boolean = false): () -> Unit = { if (!skipTest) { checkInputThrowingParserException( @@ -244,7 +244,8 @@ class SqlParserDateTimeTests : SqlParserTestBase() { checkInputThrowingParserException( source, errorCode, - errorContext) + errorContext + ) } fun parametersForTimeParserErrorTests() = listOf( @@ -695,5 +696,4 @@ class SqlParserDateTimeTests : SqlParserTestBase() { @Test @Parameters fun timeParserErrorTests(block: () -> Unit) = block() - -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/syntax/SqlParserPrecedenceTest.kt b/lang/test/org/partiql/lang/syntax/SqlParserPrecedenceTest.kt index fd87ae66d4..a09a9689a5 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserPrecedenceTest.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserPrecedenceTest.kt @@ -28,66 +28,67 @@ class SqlParserPrecedenceTest : SqlParserTestBase() { fun intersectPrecedence(pair: Pair) = runTest(pair) fun parametersForIntersectPrecedence(): List> = listOf( // two by two binary operators - /* (intersect, intersect_all) */ "a intersect b intersect all c" to "(intersect (all) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect, except) */ "a intersect b except c" to "(except (distinct) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect, except_all) */ "a intersect b except all c" to "(except (all) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect, union) */ "a intersect b union c" to "(union (distinct) (intersect 
(distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect, union_all) */ "a intersect b union all c" to "(union (all) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect, and) */ "a intersect b and c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, or) */ "a intersect b or c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, =) */ "a intersect b = c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, <>) */ "a intersect b <> c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, in) */ "a intersect b in c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, not_in) */ "a intersect b not in c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (intersect, <) */ "a intersect b < c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, <=) */ "a intersect b <= c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, >) */ "a intersect b > c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, >=) */ "a intersect b >= c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, between) */ "a intersect b between w and c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, not_between) */ "a intersect b not between y and c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (intersect, like) */ "a intersect b like c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (intersect, not_like) */ "a intersect b not like c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (intersect, +) */ "a intersect b + c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* 
(intersect, -) */ "a intersect b - c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, ||) */ "a intersect b || c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, *) */ "a intersect b * c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, /) */ "a intersect b / c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, %) */ "a intersect b % c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect, is) */ "a intersect b is boolean" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (intersect, is_not) */ "a intersect b is not boolean" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (intersect, intersect_all) */ "a intersect b intersect all c" to "(intersect (all) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect, except) */ "a intersect b except c" to "(except (distinct) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect, except_all) */ "a intersect b except all c" to "(except (all) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect, union) */ "a intersect b union c" to "(union (distinct) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect, union_all) */ "a intersect b union all c" to "(union (all) (intersect (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect, and) */ "a intersect b and c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, or) */ "a intersect b or c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, =) */ "a intersect b = c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, <>) */ "a intersect b <> c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, in) */ "a intersect b in c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, not_in) */ "a intersect 
b not in c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (intersect, <) */ "a intersect b < c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, <=) */ "a intersect b <= c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, >) */ "a intersect b > c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, >=) */ "a intersect b >= c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, between) */ "a intersect b between w and c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, not_between) */ "a intersect b not between y and c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (intersect, like) */ "a intersect b like c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (intersect, not_like) */ "a intersect b not like c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (intersect, +) */ "a intersect b + c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, -) */ "a intersect b - c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, ||) */ "a intersect b || c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, *) */ "a intersect b * c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, /) */ "a intersect b / c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, %) */ "a intersect b % c" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect, is) */ "a intersect b is boolean" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (intersect, is_not) */ "a intersect b is not boolean" to "(intersect (distinct) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters 
@TestCaseName("{0}") fun intersectAllPrecedence(pair: Pair) = runTest(pair) fun parametersForIntersectAllPrecedence() = listOf( - /* (intersect_all, intersect) */ "a intersect all b intersect c" to "(intersect (distinct) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect_all, except) */ "a intersect all b except c" to "(except (distinct) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect_all, except_all) */ "a intersect all b except all c" to "(except (all) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect_all, union) */ "a intersect all b union c" to "(union (distinct) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect_all, union_all) */ "a intersect all b union all c" to "(union (all) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (intersect_all, and) */ "a intersect all b and c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, or) */ "a intersect all b or c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, =) */ "a intersect all b = c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, <>) */ "a intersect all b <> c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, in) */ "a intersect all b in c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, not_in) */ "a intersect all b not in c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (intersect_all, <) */ "a intersect all b < c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, <=) */ "a intersect all b <= c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, >) */ "a intersect all b > c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, >=) */ "a intersect all b >= c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, between) */ "a intersect all b between w and c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c 
(case_insensitive) (unqualified))))", + /* (intersect_all, intersect) */ "a intersect all b intersect c" to "(intersect (distinct) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect_all, except) */ "a intersect all b except c" to "(except (distinct) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect_all, except_all) */ "a intersect all b except all c" to "(except (all) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect_all, union) */ "a intersect all b union c" to "(union (distinct) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect_all, union_all) */ "a intersect all b union all c" to "(union (all) (intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (intersect_all, and) */ "a intersect all b and c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, or) */ "a intersect all b or c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, =) */ "a intersect all b = c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, <>) */ "a intersect all b <> c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, in) */ "a intersect all b in c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, not_in) */ "a intersect all b not in c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (intersect_all, <) */ "a intersect all b < c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, <=) */ "a intersect all b <= c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, >) */ "a intersect all b > c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, >=) */ "a intersect all b >= c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, between) */ "a intersect all b between w and c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", /* (intersect_all, not_between) */ "a intersect all b not 
between y and c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (intersect_all, like) */ "a intersect all b like c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (intersect_all, not_like) */ "a intersect all b not like c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (intersect_all, +) */ "a intersect all b + c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, -) */ "a intersect all b - c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, ||) */ "a intersect all b || c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, *) */ "a intersect all b * c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, /) */ "a intersect all b / c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, %) */ "a intersect all b % c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (intersect_all, is) */ "a intersect all b is boolean" to "(intersect (all) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (intersect_all, is_not) */ "a intersect all b is not boolean" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + /* (intersect_all, like) */ "a intersect all b like c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (intersect_all, not_like) */ "a intersect all b not like c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (intersect_all, +) */ "a intersect all b + c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, -) */ "a intersect all b - c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, ||) */ "a intersect all b || c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, *) */ "a intersect all b * c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, /) */ "a 
intersect all b / c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, %) */ "a intersect all b % c" to "(intersect (all) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (intersect_all, is) */ "a intersect all b is boolean" to "(intersect (all) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (intersect_all, is_not) */ "a intersect all b is not boolean" to "(intersect (all) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" ) @Test @@ -95,856 +96,882 @@ class SqlParserPrecedenceTest : SqlParserTestBase() { @TestCaseName("{0}") fun exceptPrecedence(pair: Pair) = runTest(pair) fun parametersForExceptPrecedence() = listOf( - /* (except, intersect) */ "a except b intersect c" to "(intersect (distinct) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except, intersect_all) */ "a except b intersect all c" to "(intersect (all) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except, except_all) */ "a except b except all c" to "(except (all) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except, union) */ "a except b union c" to "(union (distinct) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except, union_all) */ "a except b union all c" to "(union (all) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except, and) */ "a except b and c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, or) */ "a except b or c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, =) */ "a except b = c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, <>) */ "a except b <> c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, in) */ "a except b in c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, not_in) */ "a except b not in c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (except, <) */ "a except b < c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, <=) */ "a except b <= c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) 
(unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, >) */ "a except b > c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, >=) */ "a except b >= c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, between) */ "a except b between w and c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, not_between) */ "a except b not between y and c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (except, like) */ "a except b like c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (except, not_like) */ "a except b not like c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (except, +) */ "a except b + c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, -) */ "a except b - c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, ||) */ "a except b || c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, *) */ "a except b * c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, /) */ "a except b / c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, %) */ "a except b % c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except, is) */ "a except b is boolean" to "(except (distinct) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (except, is_not) */ "a except b is not boolean" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (except, intersect) */ "a except b intersect c" to "(intersect (distinct) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except, intersect_all) */ "a except b intersect all c" to "(intersect (all) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except, except_all) */ "a except b except all c" to "(except (all) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* 
(except, union) */ "a except b union c" to "(union (distinct) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except, union_all) */ "a except b union all c" to "(union (all) (except (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except, and) */ "a except b and c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, or) */ "a except b or c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, =) */ "a except b = c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, <>) */ "a except b <> c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, in) */ "a except b in c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, not_in) */ "a except b not in c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (except, <) */ "a except b < c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, <=) */ "a except b <= c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, >) */ "a except b > c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, >=) */ "a except b >= c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, between) */ "a except b between w and c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, not_between) */ "a except b not between y and c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (except, like) */ "a except b like c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (except, not_like) */ "a except b not like c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (except, +) */ "a except b + c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, -) */ "a except b - c" to "(except (distinct) (id a 
(case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, ||) */ "a except b || c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, *) */ "a except b * c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, /) */ "a except b / c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, %) */ "a except b % c" to "(except (distinct) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except, is) */ "a except b is boolean" to "(except (distinct) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (except, is_not) */ "a except b is not boolean" to "(except (distinct) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters @TestCaseName("{0}") fun exceptAllPrecedence(pair: Pair) = runTest(pair) fun parametersForExceptAllPrecedence() = listOf( - /* (except_all, intersect) */ "a except all b intersect c" to "(intersect (distinct) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except_all, intersect_all) */ "a except all b intersect all c" to "(intersect (all) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except_all, except) */ "a except all b except c" to "(except (distinct) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except_all, union) */ "a except all b union c" to "(union (distinct) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except_all, union_all) */ "a except all b union all c" to "(union (all) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (except_all, and) */ "a except all b and c" to "(except (all) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, or) */ "a except all b or c" to "(except (all) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, =) */ "a except all b = c" to "(except (all) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, <>) */ "a except all b <> c" to "(except (all) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, in) */ "a except all b in c" to "(except (all) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, not_in) */ "a except all b not in c" to "(except (all) (id 
a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (except_all, <) */ "a except all b < c" to "(except (all) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, <=) */ "a except all b <= c" to "(except (all) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, >) */ "a except all b > c" to "(except (all) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, >=) */ "a except all b >= c" to "(except (all) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, between) */ "a except all b between w and c" to "(except (all) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, not_between) */ "a except all b not between y and c" to "(except (all) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (except_all, like) */ "a except all b like c" to "(except (all) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (except_all, not_like) */ "a except all b not like c" to "(except (all) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (except_all, +) */ "a except all b + c" to "(except (all) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, -) */ "a except all b - c" to "(except (all) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, ||) */ "a except all b || c" to "(except (all) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, *) */ "a except all b * c" to "(except (all) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, /) */ "a except all b / c" to "(except (all) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, %) */ "a except all b % c" to "(except (all) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (except_all, is) */ "a except all b is boolean" to "(except (all) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (except_all, is_not) */ "a except all b is not boolean" to "(except (all) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (except_all, intersect) */ "a except all b intersect c" to "(intersect (distinct) (except (all) (id a (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except_all, intersect_all) */ "a except all b intersect all c" to "(intersect (all) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except_all, except) */ "a except all b except c" to "(except (distinct) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except_all, union) */ "a except all b union c" to "(union (distinct) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except_all, union_all) */ "a except all b union all c" to "(union (all) (except (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (except_all, and) */ "a except all b and c" to "(except (all) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, or) */ "a except all b or c" to "(except (all) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, =) */ "a except all b = c" to "(except (all) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, <>) */ "a except all b <> c" to "(except (all) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, in) */ "a except all b in c" to "(except (all) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, not_in) */ "a except all b not in c" to "(except (all) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (except_all, <) */ "a except all b < c" to "(except (all) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, <=) */ "a except all b <= c" to "(except (all) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, >) */ "a except all b > c" to "(except (all) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, >=) */ "a except all b >= c" to "(except (all) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, between) */ "a except all b between w and c" to "(except (all) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, not_between) */ "a except all b not between y and c" to "(except (all) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (except_all, like) */ "a except all b like c" to "(except (all) (id a (case_insensitive) (unqualified)) (like 
(id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (except_all, not_like) */ "a except all b not like c" to "(except (all) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (except_all, +) */ "a except all b + c" to "(except (all) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, -) */ "a except all b - c" to "(except (all) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, ||) */ "a except all b || c" to "(except (all) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, *) */ "a except all b * c" to "(except (all) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, /) */ "a except all b / c" to "(except (all) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, %) */ "a except all b % c" to "(except (all) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (except_all, is) */ "a except all b is boolean" to "(except (all) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (except_all, is_not) */ "a except all b is not boolean" to "(except (all) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters @TestCaseName("{0}") fun unionPrecedence(pair: Pair) = runTest(pair) fun parametersForUnionPrecedence() = listOf( - /* (union, intersect) */ "a union b intersect c" to "(intersect (distinct) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union, intersect_all) */ "a union b intersect all c" to "(intersect (all) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union, except) */ "a union b except c" to "(except (distinct) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union, except_all) */ "a union b except all c" to "(except (all) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union, union_all) */ "a union b union all c" to "(union (all) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union, and) */ "a union b and c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, or) */ "a union b or c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, =) */ "a union b = c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (eq (id b 
(case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, <>) */ "a union b <> c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, in) */ "a union b in c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, not_in) */ "a union b not in c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (union, <) */ "a union b < c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, <=) */ "a union b <= c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, >) */ "a union b > c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, >=) */ "a union b >= c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, between) */ "a union b between w and c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, not_between) */ "a union b not between y and c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (union, like) */ "a union b like c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (union, not_like) */ "a union b not like c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (union, +) */ "a union b + c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, -) */ "a union b - c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, ||) */ "a union b || c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, *) */ "a union b * c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, /) */ "a union b / c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, %) */ "a union b % c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union, is) */ "a union b is boolean" to "(union (distinct) (id a 
(case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (union, is_not) */ "a union b is not boolean" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (union, intersect) */ "a union b intersect c" to "(intersect (distinct) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union, intersect_all) */ "a union b intersect all c" to "(intersect (all) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union, except) */ "a union b except c" to "(except (distinct) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union, except_all) */ "a union b except all c" to "(except (all) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union, union_all) */ "a union b union all c" to "(union (all) (union (distinct) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union, and) */ "a union b and c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, or) */ "a union b or c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, =) */ "a union b = c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, <>) */ "a union b <> c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, in) */ "a union b in c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, not_in) */ "a union b not in c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (union, <) */ "a union b < c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, <=) */ "a union b <= c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, >) */ "a union b > c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, >=) */ "a union b >= c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, between) */ "a union b between w and c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, not_between) */ "a union 
b not between y and c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (union, like) */ "a union b like c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (union, not_like) */ "a union b not like c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (union, +) */ "a union b + c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, -) */ "a union b - c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, ||) */ "a union b || c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, *) */ "a union b * c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, /) */ "a union b / c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, %) */ "a union b % c" to "(union (distinct) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union, is) */ "a union b is boolean" to "(union (distinct) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (union, is_not) */ "a union b is not boolean" to "(union (distinct) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters @TestCaseName("{0}") fun unionAllPrecedence(pair: Pair) = runTest(pair) fun parametersForUnionAllPrecedence() = listOf( - /* (union_all, intersect) */ "a union all b intersect c" to "(intersect (distinct) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union_all, intersect_all) */ "a union all b intersect all c" to "(intersect (all) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union_all, except) */ "a union all b except c" to "(except (distinct) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union_all, except_all) */ "a union all b except all c" to "(except (all) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union_all, union) */ "a union all b union c" to "(union (distinct) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (union_all, and) */ "a union all b and c" to "(union (all) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* 
(union_all, or) */ "a union all b or c" to "(union (all) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, =) */ "a union all b = c" to "(union (all) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, <>) */ "a union all b <> c" to "(union (all) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, in) */ "a union all b in c" to "(union (all) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, not_in) */ "a union all b not in c" to "(union (all) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (union_all, <) */ "a union all b < c" to "(union (all) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, <=) */ "a union all b <= c" to "(union (all) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, >) */ "a union all b > c" to "(union (all) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, >=) */ "a union all b >= c" to "(union (all) (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, between) */ "a union all b between w and c" to "(union (all) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, not_between) */ "a union all b not between y and c" to "(union (all) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (union_all, like) */ "a union all b like c" to "(union (all) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (union_all, not_like) */ "a union all b not like c" to "(union (all) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (union_all, +) */ "a union all b + c" to "(union (all) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, -) */ "a union all b - c" to "(union (all) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, ||) */ "a union all b || c" to "(union (all) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, *) */ "a union all b * c" to "(union (all) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, /) */ "a union all b / c" to "(union (all) (id a (case_insensitive) (unqualified)) (divide (id b 
(case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, %) */ "a union all b % c" to "(union (all) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (union_all, is) */ "a union all b is boolean" to "(union (all) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (union_all, is_not) */ "a union all b is not boolean" to "(union (all) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (union_all, intersect) */ "a union all b intersect c" to "(intersect (distinct) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union_all, intersect_all) */ "a union all b intersect all c" to "(intersect (all) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union_all, except) */ "a union all b except c" to "(except (distinct) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union_all, except_all) */ "a union all b except all c" to "(except (all) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union_all, union) */ "a union all b union c" to "(union (distinct) (union (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (union_all, and) */ "a union all b and c" to "(union (all) (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, or) */ "a union all b or c" to "(union (all) (id a (case_insensitive) (unqualified)) (or (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, =) */ "a union all b = c" to "(union (all) (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, <>) */ "a union all b <> c" to "(union (all) (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, in) */ "a union all b in c" to "(union (all) (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, not_in) */ "a union all b not in c" to "(union (all) (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (union_all, <) */ "a union all b < c" to "(union (all) (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, <=) */ "a union all b <= c" to "(union (all) (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, >) */ "a union all b > c" to "(union (all) (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, >=) */ "a union all b >= c" to "(union (all) (id a (case_insensitive) 
(unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, between) */ "a union all b between w and c" to "(union (all) (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, not_between) */ "a union all b not between y and c" to "(union (all) (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (union_all, like) */ "a union all b like c" to "(union (all) (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (union_all, not_like) */ "a union all b not like c" to "(union (all) (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (union_all, +) */ "a union all b + c" to "(union (all) (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, -) */ "a union all b - c" to "(union (all) (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, ||) */ "a union all b || c" to "(union (all) (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, *) */ "a union all b * c" to "(union (all) (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, /) */ "a union all b / c" to "(union (all) (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, %) */ "a union all b % c" to "(union (all) (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (union_all, is) */ "a union all b is boolean" to "(union (all) (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (union_all, is_not) */ "a union all b is not boolean" to "(union (all) (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters @TestCaseName("{0}") fun andPrecedence(pair: Pair) = runTest(pair) fun parametersForAndPrecedence() = listOf( - /* (and, intersect) */ "a and b intersect c" to "(intersect (distinct) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, intersect_all) */ "a and b intersect all c" to "(intersect (all) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, except) */ "a and b except c" to "(except (distinct) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, except_all) */ "a and b except all c" to "(except (all) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, union) */ "a and b union c" to "(union (distinct) (and (id a 
(case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, union_all) */ "a and b union all c" to "(union (all) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, or) */ "a and b or c" to "(or (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (and, =) */ "a and b = c" to "(and (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, <>) */ "a and b <> c" to "(and (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, in) */ "a and b in c" to "(and (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, not_in) */ "a and b not in c" to "(and (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (and, <) */ "a and b < c" to "(and (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, <=) */ "a and b <= c" to "(and (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, >) */ "a and b > c" to "(and (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, >=) */ "a and b >= c" to "(and (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, between) */ "a and b between w and c" to "(and (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, not_between) */ "a and b not between y and c" to "(and (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (and, like) */ "a and b like c" to "(and (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (and, not_like) */ "a and b not like c" to "(and (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (and, +) */ "a and b + c" to "(and (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, -) */ "a and b - c" to "(and (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, ||) */ "a and b || c" to "(and (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, *) */ "a and b * c" to "(and (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, /) */ "a and b / c" to "(and (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c 
(case_insensitive) (unqualified))))", - /* (and, %) */ "a and b % c" to "(and (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (and, is) */ "a and b is boolean" to "(and (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (and, is_not) */ "a and b is not boolean" to "(and (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (and, intersect) */ "a and b intersect c" to "(intersect (distinct) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, intersect_all) */ "a and b intersect all c" to "(intersect (all) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, except) */ "a and b except c" to "(except (distinct) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, except_all) */ "a and b except all c" to "(except (all) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, union) */ "a and b union c" to "(union (distinct) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, union_all) */ "a and b union all c" to "(union (all) (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, or) */ "a and b or c" to "(or (and (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (and, =) */ "a and b = c" to "(and (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, <>) */ "a and b <> c" to "(and (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, in) */ "a and b in c" to "(and (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, not_in) */ "a and b not in c" to "(and (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (and, <) */ "a and b < c" to "(and (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, <=) */ "a and b <= c" to "(and (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, >) */ "a and b > c" to "(and (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, >=) */ "a and b >= c" to "(and (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, between) */ "a and b between w and c" to "(and (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, not_between) */ "a and b not 
between y and c" to "(and (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (and, like) */ "a and b like c" to "(and (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (and, not_like) */ "a and b not like c" to "(and (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (and, +) */ "a and b + c" to "(and (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, -) */ "a and b - c" to "(and (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, ||) */ "a and b || c" to "(and (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, *) */ "a and b * c" to "(and (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, /) */ "a and b / c" to "(and (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, %) */ "a and b % c" to "(and (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (and, is) */ "a and b is boolean" to "(and (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (and, is_not) */ "a and b is not boolean" to "(and (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters @TestCaseName("{0}") fun orPrecedence(pair: Pair) = runTest(pair) fun parametersForOrPrecedence() = listOf( - /* (or, intersect) */ "a or b intersect c" to "(intersect (distinct) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (or, intersect_all) */ "a or b intersect all c " to "(intersect (all) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (or, except) */ "a or b except c" to "(except (distinct) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (or, except_all) */ "a or b except all c " to "(except (all) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (or, union) */ "a or b union c" to "(union (distinct) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (or, union_all) */ "a or b union all c " to "(union (all) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (or, and) */ "a or b and c" to "(or (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, =) */ "a or b = c" to "(or (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) 
(unqualified))))", - /* (or, <>) */ "a or b <> c" to "(or (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, in) */ "a or b in c" to "(or (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, not_in) */ "a or b not in c" to "(or (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (or, <) */ "a or b < c" to "(or (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, <=) */ "a or b <= c" to "(or (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, >) */ "a or b > c" to "(or (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, >=) */ "a or b >= c" to "(or (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, between) */ "a or b between w and c" to "(or (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, not_between) */ "a or b not between y and c" to "(or (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (or, like) */ "a or b like c" to "(or (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (or, not_like) */ "a or b not like c" to "(or (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (or, +) */ "a or b + c" to "(or (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, -) */ "a or b - c" to "(or (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, ||) */ "a or b || c" to "(or (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, *) */ "a or b * c" to "(or (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, /) */ "a or b / c" to "(or (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, %) */ "a or b % c" to "(or (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (or, is) */ "a or b is boolean" to "(or (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", - /* (or, is_not) */ "a or b is not boolean" to "(or (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))") + /* (or, intersect) */ "a or b intersect c" to "(intersect (distinct) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id 
c (case_insensitive) (unqualified)))", + /* (or, intersect_all) */ "a or b intersect all c " to "(intersect (all) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (or, except) */ "a or b except c" to "(except (distinct) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (or, except_all) */ "a or b except all c " to "(except (all) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (or, union) */ "a or b union c" to "(union (distinct) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (or, union_all) */ "a or b union all c " to "(union (all) (or (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (or, and) */ "a or b and c" to "(or (id a (case_insensitive) (unqualified)) (and (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, =) */ "a or b = c" to "(or (id a (case_insensitive) (unqualified)) (eq (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, <>) */ "a or b <> c" to "(or (id a (case_insensitive) (unqualified)) (ne (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, in) */ "a or b in c" to "(or (id a (case_insensitive) (unqualified)) (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, not_in) */ "a or b not in c" to "(or (id a (case_insensitive) (unqualified)) (not (in_collection (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (or, <) */ "a or b < c" to "(or (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, <=) */ "a or b <= c" to "(or (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, >) */ "a or b > c" to "(or (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, >=) */ "a or b >= c" to "(or (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, between) */ "a or b between w and c" to "(or (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, not_between) */ "a or b not between y and c" to "(or (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (or, like) */ "a or b like c" to "(or (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (or, not_like) */ "a or b not like c" to "(or (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (or, +) */ "a or b + c" to "(or (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* 
(or, -) */ "a or b - c" to "(or (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, ||) */ "a or b || c" to "(or (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, *) */ "a or b * c" to "(or (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, /) */ "a or b / c" to "(or (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, %) */ "a or b % c" to "(or (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (or, is) */ "a or b is boolean" to "(or (id a (case_insensitive) (unqualified)) (is_type (id b (case_insensitive) (unqualified)) (boolean_type)))", + /* (or, is_not) */ "a or b is not boolean" to "(or (id a (case_insensitive) (unqualified)) (not (is_type (id b (case_insensitive) (unqualified)) (boolean_type))))" + ) @Test @Parameters @TestCaseName("{0}") fun equalsPrecedence(pair: Pair) = runTest(pair) fun parametersForEqualsPrecedence() = listOf( - /* (=, intersect) */ "a = b intersect c" to "(intersect (distinct) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, intersect_all) */ "a = b intersect all c " to "(intersect (all) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, except) */ "a = b except c" to "(except (distinct) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, except_all) */ "a = b except all c " to "(except (all) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, union) */ "a = b union c" to "(union (distinct) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, union_all) */ "a = b union all c " to "(union (all) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, or) */ "a = b or c" to "(or (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, and) */ "a = b and c" to "(and (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, <>) */ "a = b <> c" to "(ne (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, in) */ "a = b in c" to "(in_collection (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (=, not_in) */ "a = b not in c" to "(not (in_collection (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (=, <) */ "a = b < c" to "(eq (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, <=) */ "a = b <= c" to "(eq (id a (case_insensitive) (unqualified)) 
(lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, >) */ "a = b > c" to "(eq (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, >=) */ "a = b >= c" to "(eq (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, between) */ "a = b between w and c" to "(eq (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, not_between) */ "a = b not between y and c" to "(eq (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (=, like) */ "a = b like c" to "(eq (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (=, not_like) */ "a = b not like c" to "(eq (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (=, +) */ "a = b + c" to "(eq (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, -) */ "a = b - c" to "(eq (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, ||) */ "a = b || c" to "(eq (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, *) */ "a = b * c" to "(eq (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, /) */ "a = b / c" to "(eq (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, %) */ "a = b % c" to "(eq (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (=, is) */ "a = b is boolean" to "(is_type (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (=, is_not) */ "a = b is not boolean" to "(not (is_type (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (=, intersect) */ "a = b intersect c" to "(intersect (distinct) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, intersect_all) */ "a = b intersect all c " to "(intersect (all) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, except) */ "a = b except c" to "(except (distinct) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, except_all) */ "a = b except all c " to "(except (all) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, union) */ "a = b union c" to "(union (distinct) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, union_all) */ "a 
= b union all c " to "(union (all) (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, or) */ "a = b or c" to "(or (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, and) */ "a = b and c" to "(and (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, <>) */ "a = b <> c" to "(ne (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, in) */ "a = b in c" to "(in_collection (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (=, not_in) */ "a = b not in c" to "(not (in_collection (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (=, <) */ "a = b < c" to "(eq (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, <=) */ "a = b <= c" to "(eq (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, >) */ "a = b > c" to "(eq (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, >=) */ "a = b >= c" to "(eq (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, between) */ "a = b between w and c" to "(eq (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, not_between) */ "a = b not between y and c" to "(eq (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (=, like) */ "a = b like c" to "(eq (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (=, not_like) */ "a = b not like c" to "(eq (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (=, +) */ "a = b + c" to "(eq (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, -) */ "a = b - c" to "(eq (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, ||) */ "a = b || c" to "(eq (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, *) */ "a = b * c" to "(eq (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, /) */ "a = b / c" to "(eq (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, %) */ "a = b % c" to "(eq (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (=, is) */ "a = b is boolean" to 
"(is_type (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (=, is_not) */ "a = b is not boolean" to "(not (is_type (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun notEqualPrecedence(pair: Pair) = runTest(pair) fun parametersForNotEqualPrecedence() = listOf( - /* (<>, intersect) */ "a <> b intersect c" to "(intersect (distinct) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, intersect_all) */ "a <> b intersect all c" to "(intersect (all) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, except) */ "a <> b except c" to "(except (distinct) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, except_all) */ "a <> b except all c" to "(except (all) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, union) */ "a <> b union c" to "(union (distinct) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, union_all) */ "a <> b union all c" to "(union (all) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, or) */ "a <> b or c" to "(or (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, and) */ "a <> b and c" to "(and (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, =) */ "a <> b = c" to "(eq (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, in) */ "a <> b in c" to "(in_collection (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<>, not_in) */ "a <> b not in c" to "(not (in_collection (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (<>, <) */ "a <> b < c" to "(ne (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, <=) */ "a <> b <= c" to "(ne (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, >) */ "a <> b > c" to "(ne (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, >=) */ "a <> b >= c" to "(ne (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, between) */ "a <> b between w and c" to "(ne (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, not_between) */ "a <> b not between y and c" to "(ne (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c 
(case_insensitive) (unqualified)))))", - /* (<>, like) */ "a <> b like c" to "(ne (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (<>, not_like) */ "a <> b not like c" to "(ne (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (<>, +) */ "a <> b + c" to "(ne (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, -) */ "a <> b - c" to "(ne (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, ||) */ "a <> b || c" to "(ne (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, *) */ "a <> b * c" to "(ne (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, /) */ "a <> b / c" to "(ne (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, %) */ "a <> b % c" to "(ne (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<>, is) */ "a <> b is boolean" to "(is_type (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (<>, is_not) */ "a <> b is not boolean" to "(not (is_type (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (<>, intersect) */ "a <> b intersect c" to "(intersect (distinct) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, intersect_all) */ "a <> b intersect all c" to "(intersect (all) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, except) */ "a <> b except c" to "(except (distinct) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, except_all) */ "a <> b except all c" to "(except (all) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, union) */ "a <> b union c" to "(union (distinct) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, union_all) */ "a <> b union all c" to "(union (all) (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, or) */ "a <> b or c" to "(or (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, and) */ "a <> b and c" to "(and (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, =) */ "a <> b = c" to "(eq (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<>, in) */ "a <> b in c" to "(in_collection (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c 
(case_insensitive) (unqualified)))", + /* (<>, not_in) */ "a <> b not in c" to "(not (in_collection (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (<>, <) */ "a <> b < c" to "(ne (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, <=) */ "a <> b <= c" to "(ne (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, >) */ "a <> b > c" to "(ne (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, >=) */ "a <> b >= c" to "(ne (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, between) */ "a <> b between w and c" to "(ne (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, not_between) */ "a <> b not between y and c" to "(ne (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (<>, like) */ "a <> b like c" to "(ne (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (<>, not_like) */ "a <> b not like c" to "(ne (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (<>, +) */ "a <> b + c" to "(ne (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, -) */ "a <> b - c" to "(ne (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, ||) */ "a <> b || c" to "(ne (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, *) */ "a <> b * c" to "(ne (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, /) */ "a <> b / c" to "(ne (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, %) */ "a <> b % c" to "(ne (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<>, is) */ "a <> b is boolean" to "(is_type (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (<>, is_not) */ "a <> b is not boolean" to "(not (is_type (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun isPrecedence(pair: Pair) = runTest(pair) fun parametersForIsPrecedence() = listOf( - /* (is, intersect) */ "a is boolean intersect c" to "(intersect (distinct) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, intersect_all) */ "a is boolean intersect all c" to "(intersect (all) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) 
(unqualified)))", - /* (is, except) */ "a is boolean except c" to "(except (distinct) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, except_all) */ "a is boolean except all c" to "(except (all) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, union) */ "a is boolean union c" to "(union (distinct) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, union_all) */ "a is boolean union all c" to "(union (all) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, or) */ "a is boolean or c" to "(or (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, and) */ "a is boolean and c" to "(and (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, =) */ "a is boolean = c" to "(eq (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, in) */ "a is boolean in c" to "(in_collection (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, not_in) */ "a is boolean not in c" to "(not (in_collection (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified))))", - /* (is, <) */ "a is boolean < c" to "(lt (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, <=) */ "a is boolean <= c" to "(lte (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, >) */ "a is boolean > c" to "(gt (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, >=) */ "a is boolean >= c" to "(gte (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, between) */ "a is boolean between w and c" to "(between (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (is, not_between) */ "a is boolean not between y and c" to "(not (between (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (is, like) */ "a is boolean like c" to "(like (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)) null)", - /* (is, not_like) */ "a is boolean not like c" to "(not (like (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)) null))", - /* (is, +) */ "a is boolean + c" to "(plus (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, -) */ "a is boolean - c" to "(minus (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, ||) */ "a is boolean || c" to "(concat (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, *) */ "a is boolean * c" to "(times (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, /) */ "a 
is boolean / c" to "(divide (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, %) */ "a is boolean % c" to "(modulo (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", - /* (is, is_not) */ "a is boolean is not boolean" to "(not (is_type (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (boolean_type)))") + /* (is, intersect) */ "a is boolean intersect c" to "(intersect (distinct) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, intersect_all) */ "a is boolean intersect all c" to "(intersect (all) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, except) */ "a is boolean except c" to "(except (distinct) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, except_all) */ "a is boolean except all c" to "(except (all) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, union) */ "a is boolean union c" to "(union (distinct) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, union_all) */ "a is boolean union all c" to "(union (all) (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, or) */ "a is boolean or c" to "(or (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, and) */ "a is boolean and c" to "(and (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, =) */ "a is boolean = c" to "(eq (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, in) */ "a is boolean in c" to "(in_collection (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, not_in) */ "a is boolean not in c" to "(not (in_collection (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified))))", + /* (is, <) */ "a is boolean < c" to "(lt (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, <=) */ "a is boolean <= c" to "(lte (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, >) */ "a is boolean > c" to "(gt (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, >=) */ "a is boolean >= c" to "(gte (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, between) */ "a is boolean between w and c" to "(between (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (is, not_between) */ "a is boolean not between y and c" to "(not (between (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (is, like) */ "a is boolean like c" to "(like (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)) null)", + /* (is, 
not_like) */ "a is boolean not like c" to "(not (like (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)) null))", + /* (is, +) */ "a is boolean + c" to "(plus (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, -) */ "a is boolean - c" to "(minus (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, ||) */ "a is boolean || c" to "(concat (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, *) */ "a is boolean * c" to "(times (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, /) */ "a is boolean / c" to "(divide (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, %) */ "a is boolean % c" to "(modulo (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (id c (case_insensitive) (unqualified)))", + /* (is, is_not) */ "a is boolean is not boolean" to "(not (is_type (is_type (id a (case_insensitive) (unqualified)) (boolean_type)) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun isNotPrecedence(pair: Pair) = runTest(pair) fun parametersForIsNotPrecedence() = listOf( - /* (not (is, intersect) */ "a is not boolean intersect c" to "(intersect (distinct) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, intersect_all) */ "a is not boolean intersect all c" to "(intersect (all) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, except) */ "a is not boolean except c" to "(except (distinct) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, union) */ "a is not boolean union c" to "(union (distinct) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, union_all) */ "a is not boolean union all c" to "(union (all) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, or) */ "a is not boolean or c" to "(or (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, and) */ "a is not boolean and c" to "(and (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, =) */ "a is not boolean = c" to "(eq (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, <>) */ "a is not boolean <> c" to "(ne (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, in) */ "a is not boolean in c" to "(in_collection (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, not_in) */ "a is not boolean not in c" to "(not (in_collection (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified))))", - /* (not (is, <) */ "a is not boolean < c" to "(lt (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) 
(unqualified)))", - /* (not (is, <=) */ "a is not boolean <= c" to "(lte (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, >) */ "a is not boolean > c" to "(gt (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, >=) */ "a is not boolean >= c" to "(gte (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, between) */ "a is not boolean between w and c" to "(between (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (not (is, not_between) */ "a is not boolean not between y and c" to "(not (between (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (not (is, like) */ "a is not boolean like c" to "(like (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)) null)", - /* (not (is, not_like) */ "a is not boolean not like c" to "(not (like (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)) null))", - /* (not (is, +) */ "a is not boolean + c" to "(plus (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, -) */ "a is not boolean - c" to "(minus (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, ||) */ "a is not boolean || c" to "(concat (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, *) */ "a is not boolean * c" to "(times (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, /) */ "a is not boolean / c" to "(divide (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, %) */ "a is not boolean % c" to "(modulo (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", - /* (not (is, is) */ "a is not boolean is boolean" to "(is_type (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (boolean_type))") + /* (not (is, intersect) */ "a is not boolean intersect c" to "(intersect (distinct) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, intersect_all) */ "a is not boolean intersect all c" to "(intersect (all) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, except) */ "a is not boolean except c" to "(except (distinct) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, union) */ "a is not boolean union c" to "(union (distinct) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, union_all) */ "a is not boolean union all c" to "(union (all) (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, or) */ "a is not boolean or 
c" to "(or (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, and) */ "a is not boolean and c" to "(and (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, =) */ "a is not boolean = c" to "(eq (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, <>) */ "a is not boolean <> c" to "(ne (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, in) */ "a is not boolean in c" to "(in_collection (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, not_in) */ "a is not boolean not in c" to "(not (in_collection (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified))))", + /* (not (is, <) */ "a is not boolean < c" to "(lt (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, <=) */ "a is not boolean <= c" to "(lte (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, >) */ "a is not boolean > c" to "(gt (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, >=) */ "a is not boolean >= c" to "(gte (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, between) */ "a is not boolean between w and c" to "(between (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (not (is, not_between) */ "a is not boolean not between y and c" to "(not (between (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (not (is, like) */ "a is not boolean like c" to "(like (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)) null)", + /* (not (is, not_like) */ "a is not boolean not like c" to "(not (like (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)) null))", + /* (not (is, +) */ "a is not boolean + c" to "(plus (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, -) */ "a is not boolean - c" to "(minus (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, ||) */ "a is not boolean || c" to "(concat (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, *) */ "a is not boolean * c" to "(times (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, /) */ "a is not boolean / c" to "(divide (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, %) */ "a is not boolean % c" to "(modulo (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (id c (case_insensitive) (unqualified)))", + /* (not (is, is) 
*/ "a is not boolean is boolean" to "(is_type (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))) (boolean_type))" + ) @Test @Parameters @TestCaseName("{0}") fun inPrecedence(pair: Pair) = runTest(pair) fun parametersForInPrecedence() = listOf( - /* (in, intersect) */ "a in b intersect c" to "(intersect (distinct) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, intersect_all) */ "a in b intersect all c" to "(intersect (all) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, except) */ "a in b except c" to "(except (distinct) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, except_all) */ "a in b except all c" to "(except (all) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, union) */ "a in b union c" to "(union (distinct) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, union_all) */ "a in b union all c" to "(union (all) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, or) */ "a in b or c" to "(or (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, and) */ "a in b and c" to "(and (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, =) */ "a in b = c" to "(eq (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, <>) */ "a in b <> c" to "(ne (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (in, not_in) */ "a in b not in c" to "(not (in_collection (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (in, <) */ "a in b < c" to "(in_collection (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, <=) */ "a in b <= c" to "(in_collection (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, >) */ "a in b > c" to "(in_collection (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, >=) */ "a in b >= c" to "(in_collection (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, between) */ "a in b between w and c" to "(in_collection (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, not_between) */ "a in b not between y and c" to "(in_collection (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) 
(unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (in, like) */ "a in b like c" to "(in_collection (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", - /* (in, not_like) */ "a in b not like c" to "(in_collection (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (in, +) */ "a in b + c" to "(in_collection (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, -) */ "a in b - c" to "(in_collection (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, ||) */ "a in b || c" to "(in_collection (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, *) */ "a in b * c" to "(in_collection (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, /) */ "a in b / c" to "(in_collection (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, %) */ "a in b % c" to "(in_collection (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (in, is) */ "a in b is boolean" to "(is_type (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (in, is_not) */ "a in b is not boolean" to "(not (is_type (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (in, intersect) */ "a in b intersect c" to "(intersect (distinct) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, intersect_all) */ "a in b intersect all c" to "(intersect (all) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, except) */ "a in b except c" to "(except (distinct) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, except_all) */ "a in b except all c" to "(except (all) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, union) */ "a in b union c" to "(union (distinct) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, union_all) */ "a in b union all c" to "(union (all) (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, or) */ "a in b or c" to "(or (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, and) */ "a in b and c" to "(and (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, =) */ "a in b = c" to "(eq (in_collection (id a (case_insensitive) (unqualified)) (id 
b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, <>) */ "a in b <> c" to "(ne (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (in, not_in) */ "a in b not in c" to "(not (in_collection (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (in, <) */ "a in b < c" to "(in_collection (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, <=) */ "a in b <= c" to "(in_collection (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, >) */ "a in b > c" to "(in_collection (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, >=) */ "a in b >= c" to "(in_collection (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, between) */ "a in b between w and c" to "(in_collection (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, not_between) */ "a in b not between y and c" to "(in_collection (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (in, like) */ "a in b like c" to "(in_collection (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))", + /* (in, not_like) */ "a in b not like c" to "(in_collection (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (in, +) */ "a in b + c" to "(in_collection (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, -) */ "a in b - c" to "(in_collection (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, ||) */ "a in b || c" to "(in_collection (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, *) */ "a in b * c" to "(in_collection (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, /) */ "a in b / c" to "(in_collection (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, %) */ "a in b % c" to "(in_collection (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (in, is) */ "a in b is boolean" to "(is_type (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (in, is_not) */ "a in b is not boolean" to "(not (is_type (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun notInPrecedence(pair: Pair) = 
runTest(pair) fun parametersForNotInPrecedence() = listOf( - /* (not (in, intersect) */ "a not in b intersect c" to "(intersect (distinct) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, intersect_all) */ "a not in b intersect all c" to "(intersect (all) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, except) */ "a not in b except c" to "(except (distinct) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, except_all) */ "a not in b except all c" to "(except (all) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, union) */ "a not in b union c" to "(union (distinct) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, union_all) */ "a not in b union all c" to "(union (all) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, or) */ "a not in b or c" to "(or (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, and) */ "a not in b and c" to "(and (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, =) */ "a not in b = c" to "(eq (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, <>) */ "a not in b <> c" to "(ne (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, in) */ "a not in b in c" to "(in_collection (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (in, <) */ "a not in b < c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, <=) */ "a not in b <= c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, >) */ "a not in b > c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, >=) */ "a not in b >= c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, between) */ "a not in b between w and c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, not_between) */ "a not in b not between y and c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) 
(unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))))", - /* (not (in, like) */ "a not in b like c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", - /* (not (in, not_like) */ "a not in b not like c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))))", - /* (not (in, +) */ "a not in b + c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, -) */ "a not in b - c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, ||) */ "a not in b || c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, *) */ "a not in b * c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, /) */ "a not in b / c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, %) */ "a not in b % c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (in, is) */ "a not in b is boolean" to "(is_type (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type))", - /* (not (in, is_not) */ "a not in b is not boolean" to "(not (is_type (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type)))") + /* (not (in, intersect) */ "a not in b intersect c" to "(intersect (distinct) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, intersect_all) */ "a not in b intersect all c" to "(intersect (all) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, except) */ "a not in b except c" to "(except (distinct) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, except_all) */ "a not in b except all c" to "(except (all) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, union) */ "a not in b union c" to "(union (distinct) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, union_all) */ "a not in b union all c" to "(union (all) (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, or) */ "a not in b or c" to "(or (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) 
(unqualified)))", + /* (not (in, and) */ "a not in b and c" to "(and (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, =) */ "a not in b = c" to "(eq (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, <>) */ "a not in b <> c" to "(ne (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, in) */ "a not in b in c" to "(in_collection (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (in, <) */ "a not in b < c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (lt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, <=) */ "a not in b <= c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (lte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, >) */ "a not in b > c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (gt (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, >=) */ "a not in b >= c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (gte (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, between) */ "a not in b between w and c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (between (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, not_between) */ "a not in b not between y and c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (not (between (id b (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))))", + /* (not (in, like) */ "a not in b like c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null)))", + /* (not (in, not_like) */ "a not in b not like c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (not (like (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)) null))))", + /* (not (in, +) */ "a not in b + c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, -) */ "a not in b - c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, ||) */ "a not in b || c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, *) */ "a not in b * c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, /) */ "a not in b / c" to "(not (in_collection (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, %) */ "a not in b % c" to "(not (in_collection (id 
a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (in, is) */ "a not in b is boolean" to "(is_type (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type))", + /* (not (in, is_not) */ "a not in b is not boolean" to "(not (is_type (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun ltPrecedence(pair: Pair) = runTest(pair) fun parametersForLtPrecedence() = listOf( - /* (<, intersect) */ "a < b intersect c" to "(intersect (distinct) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, intersect_all) */ "a < b intersect all c" to "(intersect (all) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, except) */ "a < b except c" to "(except (distinct) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, except_all) */ "a < b except all c" to "(except (all) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, union) */ "a < b union c" to "(union (distinct) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, union_all) */ "a < b union all c" to "(union (all) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, or) */ "a < b or c" to "(or (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, and) */ "a < b and c" to "(and (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, =) */ "a < b = c" to "(eq (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, <>) */ "a < b <> c" to "(ne (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, in) */ "a < b in c" to "(in_collection (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, not_in) */ "a < b not in c" to "(not (in_collection (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (<, <=) */ "a < b <= c" to "(lte (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, >) */ "a < b > c" to "(gt (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, >=) */ "a < b >= c" to "(gte (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<, between) */ "a < b between w and c" to "(between (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (<, not_between) */ "a 
< b not between y and c" to "(not (between (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, like) */ "a < b like c" to "(like (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (<, not_like) */ "a < b not like c" to "(not (like (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (<, +) */ "a < b + c" to "(lt (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, -) */ "a < b - c" to "(lt (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, ||) */ "a < b || c" to "(lt (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, *) */ "a < b * c" to "(lt (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, /) */ "a < b / c" to "(lt (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, %) */ "a < b % c" to "(lt (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<, is) */ "a < b is boolean" to "(is_type (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (<, is_not) */ "a < b is not boolean" to "(not (is_type (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (<, intersect) */ "a < b intersect c" to "(intersect (distinct) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, intersect_all) */ "a < b intersect all c" to "(intersect (all) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, except) */ "a < b except c" to "(except (distinct) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, except_all) */ "a < b except all c" to "(except (all) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, union) */ "a < b union c" to "(union (distinct) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, union_all) */ "a < b union all c" to "(union (all) (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, or) */ "a < b or c" to "(or (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, and) */ "a < b and c" to "(and (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, =) */ "a < b = c" to "(eq (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, <>) */ 
"a < b <> c" to "(ne (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, in) */ "a < b in c" to "(in_collection (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, not_in) */ "a < b not in c" to "(not (in_collection (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (<, <=) */ "a < b <= c" to "(lte (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, >) */ "a < b > c" to "(gt (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, >=) */ "a < b >= c" to "(gte (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<, between) */ "a < b between w and c" to "(between (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (<, not_between) */ "a < b not between y and c" to "(not (between (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, like) */ "a < b like c" to "(like (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (<, not_like) */ "a < b not like c" to "(not (like (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (<, +) */ "a < b + c" to "(lt (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, -) */ "a < b - c" to "(lt (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, ||) */ "a < b || c" to "(lt (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, *) */ "a < b * c" to "(lt (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, /) */ "a < b / c" to "(lt (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, %) */ "a < b % c" to "(lt (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<, is) */ "a < b is boolean" to "(is_type (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (<, is_not) */ "a < b is not boolean" to "(not (is_type (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun ltePrecedence(pair: Pair) = runTest(pair) fun parametersForLtePrecedence() = listOf( - /* (<=, intersect) */ "a <= b intersect c" to "(intersect (distinct) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, intersect_all) */ "a <= b intersect all c" to 
"(intersect (all) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, except) */ "a <= b except c" to "(except (distinct) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, except_all) */ "a <= b except all c" to "(except (all) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, union) */ "a <= b union c" to "(union (distinct) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, union_all) */ "a <= b union all c" to "(union (all) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, or) */ "a <= b or c" to "(or (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, and) */ "a <= b and c" to "(and (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, =) */ "a <= b = c" to "(eq (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, <>) */ "a <= b <> c" to "(ne (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, in) */ "a <= b in c" to "(in_collection (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, not_in) */ "a <= b not in c" to "(not (in_collection (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (<=, <) */ "a <= b < c" to "(lt (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, >) */ "a <= b > c" to "(gt (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, >=) */ "a <= b >= c" to "(gte (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (<=, between) */ "a <= b between w and c" to "(between (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (<=, not_between) */ "a <= b not between y and c" to "(not (between (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, like) */ "a <= b like c" to "(like (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (<=, not_like) */ "a <= b not like c" to "(not (like (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (<=, +) */ "a <= b + c" to "(lte (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, -) */ "a <= b - c" to "(lte (id a (case_insensitive) (unqualified)) (minus 
(id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, ||) */ "a <= b || c" to "(lte (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, *) */ "a <= b * c" to "(lte (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, /) */ "a <= b / c" to "(lte (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, %) */ "a <= b % c" to "(lte (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (<=, is) */ "a <= b is boolean" to "(is_type (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (<=, is_not) */ "a <= b is not boolean" to "(not (is_type (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (<=, intersect) */ "a <= b intersect c" to "(intersect (distinct) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, intersect_all) */ "a <= b intersect all c" to "(intersect (all) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, except) */ "a <= b except c" to "(except (distinct) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, except_all) */ "a <= b except all c" to "(except (all) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, union) */ "a <= b union c" to "(union (distinct) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, union_all) */ "a <= b union all c" to "(union (all) (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, or) */ "a <= b or c" to "(or (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, and) */ "a <= b and c" to "(and (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, =) */ "a <= b = c" to "(eq (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, <>) */ "a <= b <> c" to "(ne (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, in) */ "a <= b in c" to "(in_collection (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, not_in) */ "a <= b not in c" to "(not (in_collection (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (<=, <) */ "a <= b < c" to "(lt (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, >) */ "a <= b > c" to "(gt (lte (id a (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, >=) */ "a <= b >= c" to "(gte (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (<=, between) */ "a <= b between w and c" to "(between (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (<=, not_between) */ "a <= b not between y and c" to "(not (between (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, like) */ "a <= b like c" to "(like (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (<=, not_like) */ "a <= b not like c" to "(not (like (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (<=, +) */ "a <= b + c" to "(lte (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, -) */ "a <= b - c" to "(lte (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, ||) */ "a <= b || c" to "(lte (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, *) */ "a <= b * c" to "(lte (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, /) */ "a <= b / c" to "(lte (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, %) */ "a <= b % c" to "(lte (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (<=, is) */ "a <= b is boolean" to "(is_type (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (<=, is_not) */ "a <= b is not boolean" to "(not (is_type (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun gtPrecedence(pair: Pair) = runTest(pair) fun parametersForGtPrecedence() = listOf( - /* (>, intersect) */ "a > b intersect c" to "(intersect (distinct) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, intersect_all) */ "a > b intersect all c" to "(intersect (all) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, except) */ "a > b except c" to "(except (distinct) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, except_all) */ "a > b except all c" to "(except (all) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, union) */ "a > b union c" to "(union (distinct) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, union_all) */ "a > b 
union all c" to "(union (all) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, or) */ "a > b or c" to "(or (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, and) */ "a > b and c" to "(and (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, =) */ "a > b = c" to "(eq (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, <>) */ "a > b <> c" to "(ne (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, in) */ "a > b in c" to "(in_collection (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, not_in) */ "a > b not in c" to "(not (in_collection (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (>, <) */ "a > b < c" to "(lt (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, <=) */ "a > b <= c" to "(lte (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, >=) */ "a > b >= c" to "(gte (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>, between) */ "a > b between w and c" to "(between (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (>, not_between) */ "a > b not between y and c" to "(not (between (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, like) */ "a > b like c" to "(like (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (>, not_like) */ "a > b not like c" to "(not (like (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (>, +) */ "a > b + c" to "(gt (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, -) */ "a > b - c" to "(gt (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, ||) */ "a > b || c" to "(gt (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, *) */ "a > b * c" to "(gt (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, /) */ "a > b / c" to "(gt (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, %) */ "a > b % c" to "(gt (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>, is) */ "a > b is boolean" to 
"(is_type (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (>, is_not) */ "a > b is not boolean" to "(not (is_type (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (>, intersect) */ "a > b intersect c" to "(intersect (distinct) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, intersect_all) */ "a > b intersect all c" to "(intersect (all) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, except) */ "a > b except c" to "(except (distinct) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, except_all) */ "a > b except all c" to "(except (all) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, union) */ "a > b union c" to "(union (distinct) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, union_all) */ "a > b union all c" to "(union (all) (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, or) */ "a > b or c" to "(or (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, and) */ "a > b and c" to "(and (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, =) */ "a > b = c" to "(eq (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, <>) */ "a > b <> c" to "(ne (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, in) */ "a > b in c" to "(in_collection (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, not_in) */ "a > b not in c" to "(not (in_collection (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (>, <) */ "a > b < c" to "(lt (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, <=) */ "a > b <= c" to "(lte (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, >=) */ "a > b >= c" to "(gte (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>, between) */ "a > b between w and c" to "(between (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (>, not_between) */ "a > b not between y and c" to "(not (between (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, like) */ "a > b like c" to "(like (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c 
(case_insensitive) (unqualified)) null)", + /* (>, not_like) */ "a > b not like c" to "(not (like (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (>, +) */ "a > b + c" to "(gt (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, -) */ "a > b - c" to "(gt (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, ||) */ "a > b || c" to "(gt (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, *) */ "a > b * c" to "(gt (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, /) */ "a > b / c" to "(gt (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, %) */ "a > b % c" to "(gt (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>, is) */ "a > b is boolean" to "(is_type (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (>, is_not) */ "a > b is not boolean" to "(not (is_type (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun gtePrecedence(pair: Pair) = runTest(pair) fun parametersForGtePrecedence() = listOf( - /* (>=, intersect) */ "a >= b intersect c" to "(intersect (distinct) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, intersect_all) */ "a >= b intersect all c" to "(intersect (all) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, except) */ "a >= b except c" to "(except (distinct) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, except_all) */ "a >= b except all c" to "(except (all) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, union) */ "a >= b union c" to "(union (distinct) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, union_all) */ "a >= b union all c" to "(union (all) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, or) */ "a >= b or c" to "(or (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, and) */ "a >= b and c" to "(and (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, =) */ "a >= b = c" to "(eq (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, <>) */ "a >= b <> c" to "(ne (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, in) */ "a >= b in c" to 
"(in_collection (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, not_in) */ "a >= b not in c" to "(not (in_collection (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (>=, <) */ "a >= b < c" to "(lt (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, <=) */ "a >= b <= c" to "(lte (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, >) */ "a >= b > c" to "(gt (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (>=, between) */ "a >= b between w and c" to "(between (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (>=, not_between) */ "a >= b not between y and c" to "(not (between (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, like) */ "a >= b like c" to "(like (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (>=, not_like) */ "a >= b not like c" to "(not (like (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (>=, +) */ "a >= b + c" to "(gte (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, -) */ "a >= b - c" to "(gte (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, ||) */ "a >= b || c" to "(gte (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, *) */ "a >= b * c" to "(gte (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, /) */ "a >= b / c" to "(gte (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, %) */ "a >= b % c" to "(gte (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (>=, is) */ "a >= b is boolean" to "(is_type (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (>=, is_not) */ "a >= b is not boolean" to "(not (is_type (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (>=, intersect) */ "a >= b intersect c" to "(intersect (distinct) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, intersect_all) */ "a >= b intersect all c" to "(intersect (all) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, except) */ "a >= b except c" to "(except (distinct) (gte (id a (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, except_all) */ "a >= b except all c" to "(except (all) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, union) */ "a >= b union c" to "(union (distinct) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, union_all) */ "a >= b union all c" to "(union (all) (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, or) */ "a >= b or c" to "(or (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, and) */ "a >= b and c" to "(and (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, =) */ "a >= b = c" to "(eq (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, <>) */ "a >= b <> c" to "(ne (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, in) */ "a >= b in c" to "(in_collection (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, not_in) */ "a >= b not in c" to "(not (in_collection (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (>=, <) */ "a >= b < c" to "(lt (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, <=) */ "a >= b <= c" to "(lte (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, >) */ "a >= b > c" to "(gt (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (>=, between) */ "a >= b between w and c" to "(between (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (>=, not_between) */ "a >= b not between y and c" to "(not (between (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>=, like) */ "a >= b like c" to "(like (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (>=, not_like) */ "a >= b not like c" to "(not (like (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (>=, +) */ "a >= b + c" to "(gte (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>=, -) */ "a >= b - c" to "(gte (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>=, ||) */ "a >= b || c" to "(gte (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* 
(>=, *) */ "a >= b * c" to "(gte (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>=, /) */ "a >= b / c" to "(gte (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>=, %) */ "a >= b % c" to "(gte (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (>=, is) */ "a >= b is boolean" to "(is_type (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (>=, is_not) */ "a >= b is not boolean" to "(not (is_type (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun betweenPrecedence(pair: Pair) = runTest(pair) fun parametersForBetweenPrecedence() = listOf( - /* (between, intersect) */ "a between b and w intersect c" to "(intersect (distinct) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, intersect_all) */ "a between b and w intersect all c" to "(intersect (all) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, except) */ "a between b and w except c" to "(except (distinct) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, except_all) */ "a between b and w except all c" to "(except (all) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, union) */ "a between b and w union c" to "(union (distinct) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, union_all) */ "a between b and w union all c" to "(union (all) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, or) */ "a between w and b or c" to "(or (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, and) */ "a between w and b and c" to "(and (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, =) */ "a between w and b = c" to "(eq (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, <>) */ "a between w and b <> c" to "(ne (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, in) */ "a between w and b in c" to "(in_collection (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) 
(unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, not_in) */ "a between w and b not in c" to "(not (in_collection (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (between, <) */ "a between w and b < c" to "(lt (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, <=) */ "a between w and b <= c" to "(lte (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, >) */ "a between w and b > c" to "(gt (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (between, >=) */ "a between w and b >= c" to "(gte (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, intersect) */ "a between b and w intersect c" to "(intersect (distinct) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, intersect_all) */ "a between b and w intersect all c" to "(intersect (all) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, except) */ "a between b and w except c" to "(except (distinct) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, except_all) */ "a between b and w except all c" to "(except (all) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, union) */ "a between b and w union c" to "(union (distinct) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, union_all) */ "a between b and w union all c" to "(union (all) (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, or) */ "a between w and b or c" to "(or (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, and) */ "a between w and b and c" to "(and (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, =) */ "a between w and b = c" to "(eq (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, <>) */ "a between w and b <> c" to "(ne (between (id a 
(case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, in) */ "a between w and b in c" to "(in_collection (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, not_in) */ "a between w and b not in c" to "(not (in_collection (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (between, <) */ "a between w and b < c" to "(lt (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, <=) */ "a between w and b <= c" to "(lte (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, >) */ "a between w and b > c" to "(gt (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (between, >=) */ "a between w and b >= c" to "(gte (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", /* (between, not_between) */ "a between w and b not between y and c" to "(not (between (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, like) */ "a between w and b like c" to "(like (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (between, not_like) */ "a between w and b not like c" to "(not (like (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (between, +) */ "a between w and b + c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, -) */ "a between w and b - c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, ||) */ "a between w and b || c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, *) */ "a between w and b * c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, /) */ "a between w and b / c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, %) */ "a between w and b % c" to 
"(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (between, is) */ "a between w and b is boolean" to "(is_type (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (between, is_not) */ "a between w and b is not boolean" to "(not (is_type (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (between, like) */ "a between w and b like c" to "(like (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (between, not_like) */ "a between w and b not like c" to "(not (like (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (between, +) */ "a between w and b + c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (between, -) */ "a between w and b - c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (between, ||) */ "a between w and b || c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (between, *) */ "a between w and b * c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (between, /) */ "a between w and b / c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (between, %) */ "a between w and b % c" to "(between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (between, is) */ "a between w and b is boolean" to "(is_type (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (between, is_not) */ "a between w and b is not boolean" to "(not (is_type (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun notBetweenPrecedence(pair: Pair) = runTest(pair) fun parametersForNotBetweenPrecedence() = listOf( - /* (not (between, intersect) */ "a not between w and b intersect c" to "(intersect (distinct) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, intersect) */ "a not between w and b intersect c" to "(intersect (distinct) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", /* (not (between, intersect_all) */ "a not between w and b intersect all c" to "(intersect (all) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, except) */ "a not between w and b except c" to "(except (distinct) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, except_all) */ "a not between w and b except all c" to "(except (all) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, union) */ "a not between w and b union c" to "(union (distinct) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, union_all) */ "a not between w and b union all c" to "(union (all) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, or) */ "a not between y and b or c" to "(or (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, and) */ "a not between y and b and c" to "(and (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, =) */ "a not between y and b = c" to "(eq (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, <>) */ "a not between y and b <> c" to "(ne (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, in) */ "a not between y and b in c" to "(in_collection (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, not_in) */ "a not between y and b not in c" to "(not (in_collection (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified))))", - /* (not (between, <) */ "a not between y and b < c" to "(lt (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, <=) */ "a not between y and b <= c" to "(lte (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, >) */ "a not between y and b > c" to "(gt (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) 
(id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", - /* (not (between, >=) */ "a not between y and b >= c" to "(gte (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, except) */ "a not between w and b except c" to "(except (distinct) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, except_all) */ "a not between w and b except all c" to "(except (all) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, union) */ "a not between w and b union c" to "(union (distinct) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, union_all) */ "a not between w and b union all c" to "(union (all) (not (between (id a (case_insensitive) (unqualified)) (id w (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, or) */ "a not between y and b or c" to "(or (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, and) */ "a not between y and b and c" to "(and (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, =) */ "a not between y and b = c" to "(eq (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, <>) */ "a not between y and b <> c" to "(ne (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, in) */ "a not between y and b in c" to "(in_collection (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, not_in) */ "a not between y and b not in c" to "(not (in_collection (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified))))", + /* (not (between, <) */ "a not between y and b < c" to "(lt (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, <=) */ "a not between y and b <= c" to "(lte (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, >) */ "a not between y and b > c" to "(gt (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) 
(unqualified)))) (id c (case_insensitive) (unqualified)))", + /* (not (between, >=) */ "a not between y and b >= c" to "(gte (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)))", /* (not (between, between) */ "a not between y and b between w and c" to "(between (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (not (between, like) */ "a not between y and b like c" to "(like (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)) null)", - /* (not (between, not_like) */ "a not between y and b not like c" to "(not (like (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)) null))", - /* (not (between, +) */ "a not between y and b + c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (between, -) */ "a not between y and b - c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (between, ||) */ "a not between y and b || c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (between, *) */ "a not between y and b * c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (between, /) */ "a not between y and b / c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (between, %) */ "a not between y and b % c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not (between, is) */ "a not between y and b is boolean" to "(is_type (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type))", - /* (not (between, is_not) */ "a not between y and b is not boolean" to "(not (is_type (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type)))") + /* (not (between, like) */ "a not between y and b like c" to "(like (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c (case_insensitive) (unqualified)) null)", + /* (not (between, not_like) */ "a not between y and b not like c" to "(not (like (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (id c 
(case_insensitive) (unqualified)) null))", + /* (not (between, +) */ "a not between y and b + c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (between, -) */ "a not between y and b - c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (between, ||) */ "a not between y and b || c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (between, *) */ "a not between y and b * c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (between, /) */ "a not between y and b / c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (between, %) */ "a not between y and b % c" to "(not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", + /* (not (between, is) */ "a not between y and b is boolean" to "(is_type (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type))", + /* (not (between, is_not) */ "a not between y and b is not boolean" to "(not (is_type (not (between (id a (case_insensitive) (unqualified)) (id y (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun likePrecedence(pair: Pair) = runTest(pair) fun parametersForLikePrecedence() = listOf( - /* (like, intersect) */ "a like b intersect c" to "(intersect (distinct) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, intersect_all) */ "a like b intersect all c" to "(intersect (all) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, except) */ "a like b except c" to "(except (distinct) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, except_all) */ "a like b except all c" to "(except (all) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, union) */ "a like b union c" to "(union (distinct) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, union_all) */ "a like b union all c" to "(union (all) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, or) */ "a like b or c" to "(or (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, and) */ "a like b and 
c" to "(and (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, =) */ "a like b = c" to "(eq (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, <>) */ "a like b <> c" to "(ne (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, in) */ "a like b in c" to "(in_collection (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, not_in) */ "a like b not in c" to "(not (in_collection (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified))))", - /* (like, <) */ "a like b < c" to "(lt (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, <=) */ "a like b <= c" to "(lte (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, >) */ "a like b > c" to "(gt (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, >=) */ "a like b >= c" to "(gte (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", - /* (like, between) */ "a like b between w and c" to "(between (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (like, not_between) */ "a like b not between y and c" to "(not (between (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (like, not_like) */ "a like b not like c" to "(not (like (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)) null))", - /* (like, +) */ "a like b + c" to "(like (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", - /* (like, -) */ "a like b - c" to "(like (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", - /* (like, ||) */ "a like b || c" to "(like (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", - /* (like, *) */ "a like b * c" to "(like (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", - /* (like, /) */ "a like b / c" to "(like (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", - /* (like, %) */ "a like b % c" to "(like (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", - /* (like, is) */ "a like b is boolean" to "(is_type (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (boolean_type))", - /* (like, 
is_not) */ "a like b is not boolean" to "(not (is_type (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (boolean_type)))") + /* (like, intersect) */ "a like b intersect c" to "(intersect (distinct) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, intersect_all) */ "a like b intersect all c" to "(intersect (all) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, except) */ "a like b except c" to "(except (distinct) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, except_all) */ "a like b except all c" to "(except (all) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, union) */ "a like b union c" to "(union (distinct) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, union_all) */ "a like b union all c" to "(union (all) (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, or) */ "a like b or c" to "(or (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, and) */ "a like b and c" to "(and (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, =) */ "a like b = c" to "(eq (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, <>) */ "a like b <> c" to "(ne (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, in) */ "a like b in c" to "(in_collection (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, not_in) */ "a like b not in c" to "(not (in_collection (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified))))", + /* (like, <) */ "a like b < c" to "(lt (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, <=) */ "a like b <= c" to "(lte (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, >) */ "a like b > c" to "(gt (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, >=) */ "a like b >= c" to "(gte (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)))", + /* (like, between) */ "a like b between w and c" to "(between (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (like, not_between) */ "a like b not between y and c" to "(not (between (like (id a (case_insensitive) 
(unqualified)) (id b (case_insensitive) (unqualified)) null) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (like, not_like) */ "a like b not like c" to "(not (like (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (id c (case_insensitive) (unqualified)) null))", + /* (like, +) */ "a like b + c" to "(like (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", + /* (like, -) */ "a like b - c" to "(like (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", + /* (like, ||) */ "a like b || c" to "(like (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", + /* (like, *) */ "a like b * c" to "(like (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", + /* (like, /) */ "a like b / c" to "(like (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", + /* (like, %) */ "a like b % c" to "(like (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null)", + /* (like, is) */ "a like b is boolean" to "(is_type (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (boolean_type))", + /* (like, is_not) */ "a like b is not boolean" to "(not (is_type (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun notLikePrecedence(pair: Pair) = runTest(pair) fun parametersForNotLikePrecedence() = listOf( - /* (not (like, intersect) */ "a not like b intersect c" to "(intersect (distinct) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, intersect_all) */ "a not like b intersect all c" to "(intersect (all) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, except) */ "a not like b except c" to "(except (distinct) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, except_all) */ "a not like b except all c" to "(except (all) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, union) */ "a not like b union c" to "(union (distinct) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, union_all) */ "a not like b union all c" to "(union (all) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, or) */ "a not like b or c" to "(or (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, and) */ "a not like b and c" to "(and (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) 
(unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, =) */ "a not like b = c" to "(eq (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, <>) */ "a not like b <> c" to "(ne (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, in) */ "a not like b in c" to "(in_collection (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, not_in) */ "a not like b not in c" to "(not (in_collection (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified))))", - /* (not (like, <) */ "a not like b < c" to "(lt (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, <=) */ "a not like b <= c" to "(lte (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, >) */ "a not like b > c" to "(gt (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, >=) */ "a not like b >= c" to "(gte (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", - /* (not (like, between) */ "a not like b between w and c" to "(between (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (not (like, not_between) */ "a not like b not between y and c" to "(not (between (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (not (like, like) */ "a not like b like c" to "(like (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)) null)", - /* (not (like, +) */ "a not like b + c" to "(not (like (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", - /* (not (like, -) */ "a not like b - c" to "(not (like (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", - /* (not (like, ||) */ "a not like b || c" to "(not (like (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", - /* (not (like, *) */ "a not like b * c" to "(not (like (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", - /* (not (like, /) */ "a not like b / c" to "(not (like (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", - /* (not (like, %) */ "a not like b % c" to "(not (like (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", - /* (not (like, 
is) */ "a not like b is boolean" to "(is_type (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (boolean_type))", - /* (not (like, is_not) */ "a not like b is not boolean" to "(not (is_type (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (boolean_type)))") + /* (not (like, intersect) */ "a not like b intersect c" to "(intersect (distinct) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, intersect_all) */ "a not like b intersect all c" to "(intersect (all) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, except) */ "a not like b except c" to "(except (distinct) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, except_all) */ "a not like b except all c" to "(except (all) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, union) */ "a not like b union c" to "(union (distinct) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, union_all) */ "a not like b union all c" to "(union (all) (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, or) */ "a not like b or c" to "(or (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, and) */ "a not like b and c" to "(and (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, =) */ "a not like b = c" to "(eq (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, <>) */ "a not like b <> c" to "(ne (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, in) */ "a not like b in c" to "(in_collection (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, not_in) */ "a not like b not in c" to "(not (in_collection (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified))))", + /* (not (like, <) */ "a not like b < c" to "(lt (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, <=) */ "a not like b <= c" to "(lte (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, >) */ "a not like b > c" to "(gt (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, >=) */ "a not like b >= c" to "(gte (not (like (id a (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)))", + /* (not (like, between) */ "a not like b between w and c" to "(between (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (not (like, not_between) */ "a not like b not between y and c" to "(not (between (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (not (like, like) */ "a not like b like c" to "(like (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (id c (case_insensitive) (unqualified)) null)", + /* (not (like, +) */ "a not like b + c" to "(not (like (id a (case_insensitive) (unqualified)) (plus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", + /* (not (like, -) */ "a not like b - c" to "(not (like (id a (case_insensitive) (unqualified)) (minus (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", + /* (not (like, ||) */ "a not like b || c" to "(not (like (id a (case_insensitive) (unqualified)) (concat (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", + /* (not (like, *) */ "a not like b * c" to "(not (like (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", + /* (not (like, /) */ "a not like b / c" to "(not (like (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", + /* (not (like, %) */ "a not like b % c" to "(not (like (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))) null))", + /* (not (like, is) */ "a not like b is boolean" to "(is_type (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (boolean_type))", + /* (not (like, is_not) */ "a not like b is not boolean" to "(not (is_type (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun subtractPrecedence(pair: Pair) = runTest(pair) fun parametersForSubtractPrecedence() = listOf( - /* (+, intersect) */ "a + b intersect c" to "(intersect (distinct) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, intersect_all) */ "a + b intersect all c" to "(intersect (all) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, except) */ "a + b except c" to "(except (distinct) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, except_all) */ "a + b except all c" to "(except (all) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, union) */ "a + b union c" to "(union (distinct) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, union_all) */ "a + b union all c" to "(union (all) (plus (id a (case_insensitive) 
(unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, or) */ "a + b or c" to "(or (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, and) */ "a + b and c" to "(and (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, =) */ "a + b = c" to "(eq (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, <>) */ "a + b <> c" to "(ne (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, in) */ "a + b in c" to "(in_collection (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, not_in) */ "a + b not in c" to "(not (in_collection (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (+, <) */ "a + b < c" to "(lt (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, <=) */ "a + b <= c" to "(lte (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, >) */ "a + b > c" to "(gt (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, >=) */ "a + b >= c" to "(gte (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, between) */ "a + b between w and c" to "(between (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (+, not_between) */ "a + b not between y and c" to "(not (between (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (+, like) */ "a + b like c" to "(like (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (+, not_like) */ "a + b not like c" to "(not (like (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (+, -) */ "a + b - c" to "(minus (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, ||) */ "a + b || c" to "(concat (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (+, *) */ "a + b * c" to "(plus (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (+, /) */ "a + b / c" to "(plus (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (+, %) */ "a + b % c" to "(plus (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (+, is) */ "a + b is boolean" to "(is_type (plus (id a 
(case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (+, is_not) */ "a + b is not boolean" to "(not (is_type (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (+, intersect) */ "a + b intersect c" to "(intersect (distinct) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, intersect_all) */ "a + b intersect all c" to "(intersect (all) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, except) */ "a + b except c" to "(except (distinct) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, except_all) */ "a + b except all c" to "(except (all) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, union) */ "a + b union c" to "(union (distinct) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, union_all) */ "a + b union all c" to "(union (all) (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, or) */ "a + b or c" to "(or (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, and) */ "a + b and c" to "(and (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, =) */ "a + b = c" to "(eq (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, <>) */ "a + b <> c" to "(ne (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, in) */ "a + b in c" to "(in_collection (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, not_in) */ "a + b not in c" to "(not (in_collection (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (+, <) */ "a + b < c" to "(lt (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, <=) */ "a + b <= c" to "(lte (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, >) */ "a + b > c" to "(gt (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, >=) */ "a + b >= c" to "(gte (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, between) */ "a + b between w and c" to "(between (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (+, not_between) */ "a + b not between y and c" to "(not (between (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) 
(id c (case_insensitive) (unqualified))))", + /* (+, like) */ "a + b like c" to "(like (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (+, not_like) */ "a + b not like c" to "(not (like (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (+, -) */ "a + b - c" to "(minus (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, ||) */ "a + b || c" to "(concat (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (+, *) */ "a + b * c" to "(plus (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (+, /) */ "a + b / c" to "(plus (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (+, %) */ "a + b % c" to "(plus (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (+, is) */ "a + b is boolean" to "(is_type (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (+, is_not) */ "a + b is not boolean" to "(not (is_type (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun minusPrecedence(pair: Pair) = runTest(pair) fun parametersForMinusPrecedence() = listOf( - /* (-, intersect) */ "a - b intersect c" to "(intersect (distinct) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, intersect_all) */ "a - b intersect all c" to "(intersect (all) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, except) */ "a - b except c" to "(except (distinct) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, except_all) */ "a - b except all c" to "(except (all) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, union) */ "a - b union c" to "(union (distinct) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, union_all) */ "a - b union all c" to "(union (all) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, or) */ "a - b or c" to "(or (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, and) */ "a - b and c" to "(and (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, =) */ "a - b = c" to "(eq (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, <>) */ "a - b <> c" to "(ne (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", 
- /* (-, in) */ "a - b in c" to "(in_collection (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, not_in) */ "a - b not in c" to "(not (in_collection (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (-, <) */ "a - b < c" to "(lt (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, <=) */ "a - b <= c" to "(lte (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, >) */ "a - b > c" to "(gt (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, >=) */ "a - b >= c" to "(gte (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, between) */ "a - b between w and c" to "(between (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (-, not_between) */ "a - b not between y and c" to "(not (between (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (-, like) */ "a - b like c" to "(like (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (-, not_like) */ "a - b not like c" to "(not (like (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (-, +) */ "a - b + c" to "(plus (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, ||) */ "a - b || c" to "(concat (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (-, *) */ "a - b * c" to "(minus (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (-, /) */ "a - b / c" to "(minus (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (-, %) */ "a - b % c" to "(minus (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (-, is) */ "a - b is boolean" to "(is_type (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (-, is_not) */ "a - b is not boolean" to "(not (is_type (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (-, intersect) */ "a - b intersect c" to "(intersect (distinct) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, intersect_all) */ "a - b intersect all c" to "(intersect (all) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, except) */ "a - b except c" to "(except (distinct) (minus (id a (case_insensitive) 
(unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, except_all) */ "a - b except all c" to "(except (all) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, union) */ "a - b union c" to "(union (distinct) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, union_all) */ "a - b union all c" to "(union (all) (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, or) */ "a - b or c" to "(or (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, and) */ "a - b and c" to "(and (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, =) */ "a - b = c" to "(eq (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, <>) */ "a - b <> c" to "(ne (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, in) */ "a - b in c" to "(in_collection (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, not_in) */ "a - b not in c" to "(not (in_collection (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (-, <) */ "a - b < c" to "(lt (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, <=) */ "a - b <= c" to "(lte (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, >) */ "a - b > c" to "(gt (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, >=) */ "a - b >= c" to "(gte (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, between) */ "a - b between w and c" to "(between (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (-, not_between) */ "a - b not between y and c" to "(not (between (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (-, like) */ "a - b like c" to "(like (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (-, not_like) */ "a - b not like c" to "(not (like (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (-, +) */ "a - b + c" to "(plus (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (-, ||) */ "a - b || c" to "(concat (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) 
(unqualified)))", + /* (-, *) */ "a - b * c" to "(minus (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (-, /) */ "a - b / c" to "(minus (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (-, %) */ "a - b % c" to "(minus (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (-, is) */ "a - b is boolean" to "(is_type (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (-, is_not) */ "a - b is not boolean" to "(not (is_type (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun concatPrecedence(pair: Pair) = runTest(pair) fun parametersForConcatPrecedence() = listOf( - /* (||, intersect) */ "a || b intersect c" to "(intersect (distinct) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, intersect_all) */ "a || b intersect all c" to "(intersect (all) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, except) */ "a || b except c" to "(except (distinct) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, except_all) */ "a || b except all c" to "(except (all) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, union) */ "a || b union c" to "(union (distinct) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, union_all) */ "a || b union all c" to "(union (all) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, or) */ "a || b or c" to "(or (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, and) */ "a || b and c" to "(and (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, =) */ "a || b = c" to "(eq (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, <>) */ "a || b <> c" to "(ne (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, in) */ "a || b in c" to "(in_collection (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, not_in) */ "a || b not in c" to "(not (in_collection (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (||, <) */ "a || b < c" to "(lt (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, <=) */ "a || b <= c" to "(lte (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c 
(case_insensitive) (unqualified)))", - /* (||, >) */ "a || b > c" to "(gt (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, >=) */ "a || b >= c" to "(gte (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, between) */ "a || b between w and c" to "(between (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (||, not_between) */ "a || b not between y and c" to "(not (between (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (||, like) */ "a || b like c" to "(like (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (||, not_like) */ "a || b not like c" to "(not (like (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (||, +) */ "a || b + c" to "(plus (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, -) */ "a || b - c" to "(minus (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (||, *) */ "a || b * c" to "(concat (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (||, /) */ "a || b / c" to "(concat (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (||, %) */ "a || b % c" to "(concat (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (||, is) */ "a || b is boolean" to "(is_type (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (||, is_not) */ "a || b is not boolean" to "(not (is_type (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (||, intersect) */ "a || b intersect c" to "(intersect (distinct) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, intersect_all) */ "a || b intersect all c" to "(intersect (all) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, except) */ "a || b except c" to "(except (distinct) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, except_all) */ "a || b except all c" to "(except (all) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, union) */ "a || b union c" to "(union (distinct) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, union_all) */ "a || b union all c" to "(union (all) (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) 
(unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, or) */ "a || b or c" to "(or (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, and) */ "a || b and c" to "(and (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, =) */ "a || b = c" to "(eq (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, <>) */ "a || b <> c" to "(ne (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, in) */ "a || b in c" to "(in_collection (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, not_in) */ "a || b not in c" to "(not (in_collection (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (||, <) */ "a || b < c" to "(lt (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, <=) */ "a || b <= c" to "(lte (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, >) */ "a || b > c" to "(gt (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, >=) */ "a || b >= c" to "(gte (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, between) */ "a || b between w and c" to "(between (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (||, not_between) */ "a || b not between y and c" to "(not (between (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (||, like) */ "a || b like c" to "(like (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (||, not_like) */ "a || b not like c" to "(not (like (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (||, +) */ "a || b + c" to "(plus (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, -) */ "a || b - c" to "(minus (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (||, *) */ "a || b * c" to "(concat (id a (case_insensitive) (unqualified)) (times (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (||, /) */ "a || b / c" to "(concat (id a (case_insensitive) (unqualified)) (divide (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (||, %) */ "a || b % c" to "(concat (id a (case_insensitive) (unqualified)) (modulo (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (||, is) */ "a || b is 
boolean" to "(is_type (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (||, is_not) */ "a || b is not boolean" to "(not (is_type (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun mulPrecedence(pair: Pair) = runTest(pair) fun parametersForMulPrecedence() = listOf( - /* (*, intersect) */ "a * b intersect c" to "(intersect (distinct) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, intersect_all) */ "a * b intersect all c" to "(intersect (all) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, except) */ "a * b except c" to "(except (distinct) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, except_all) */ "a * b except all c" to "(except (all) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, union) */ "a * b union c" to "(union (distinct) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, union_all) */ "a * b union all c" to "(union (all) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, or) */ "a * b or c" to "(or (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, and) */ "a * b and c" to "(and (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, =) */ "a * b = c" to "(eq (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, <>) */ "a * b <> c" to "(ne (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, in) */ "a * b in c" to "(in_collection (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, not_in) */ "a * b not in c" to "(not (in_collection (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (*, <) */ "a * b < c" to "(lt (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, <=) */ "a * b <= c" to "(lte (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, >) */ "a * b > c" to "(gt (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, >=) */ "a * b >= c" to "(gte (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, between) */ "a * b between w and c" to "(between (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (*, 
not_between) */ "a * b not between y and c" to "(not (between (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (*, like) */ "a * b like c" to "(like (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (*, not_like) */ "a * b not like c" to "(not (like (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (*, +) */ "a * b + c" to "(plus (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, -) */ "a * b - c" to "(minus (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, ||) */ "a * b || c" to "(concat (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, /) */ "a * b / c" to "(divide (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, %) */ "a * b % c" to "(modulo (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (*, is) */ "a * b is boolean" to "(is_type (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (*, is_not) */ "a * b is not boolean" to "(not (is_type (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (*, intersect) */ "a * b intersect c" to "(intersect (distinct) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, intersect_all) */ "a * b intersect all c" to "(intersect (all) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, except) */ "a * b except c" to "(except (distinct) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, except_all) */ "a * b except all c" to "(except (all) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, union) */ "a * b union c" to "(union (distinct) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, union_all) */ "a * b union all c" to "(union (all) (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, or) */ "a * b or c" to "(or (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, and) */ "a * b and c" to "(and (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, =) */ "a * b = c" to "(eq (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, <>) */ "a * b <> c" to "(ne (times (id a (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, in) */ "a * b in c" to "(in_collection (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, not_in) */ "a * b not in c" to "(not (in_collection (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (*, <) */ "a * b < c" to "(lt (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, <=) */ "a * b <= c" to "(lte (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, >) */ "a * b > c" to "(gt (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, >=) */ "a * b >= c" to "(gte (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, between) */ "a * b between w and c" to "(between (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (*, not_between) */ "a * b not between y and c" to "(not (between (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (*, like) */ "a * b like c" to "(like (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (*, not_like) */ "a * b not like c" to "(not (like (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (*, +) */ "a * b + c" to "(plus (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, -) */ "a * b - c" to "(minus (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, ||) */ "a * b || c" to "(concat (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, /) */ "a * b / c" to "(divide (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, %) */ "a * b % c" to "(modulo (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (*, is) */ "a * b is boolean" to "(is_type (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (*, is_not) */ "a * b is not boolean" to "(not (is_type (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun divPrecedence(pair: Pair) = runTest(pair) fun parametersForDivPrecedence() = listOf( - /* (/, intersect) */ "a / b intersect c" to "(intersect (distinct) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, intersect_all) */ "a / b intersect all c" to "(intersect (all) (divide (id a 
(case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, except) */ "a / b except c" to "(except (distinct) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, except_all) */ "a / b except all c" to "(except (all) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, union) */ "a / b union c" to "(union (distinct) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, union_all) */ "a / b union all c" to "(union (all) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, or) */ "a / b or c" to "(or (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, and) */ "a / b and c" to "(and (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, =) */ "a / b = c" to "(eq (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, <>) */ "a / b <> c" to "(ne (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, in) */ "a / b in c" to "(in_collection (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, not_in) */ "a / b not in c" to "(not (in_collection (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (/, <) */ "a / b < c" to "(lt (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, <=) */ "a / b <= c" to "(lte (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, >) */ "a / b > c" to "(gt (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, >=) */ "a / b >= c" to "(gte (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, between) */ "a / b between w and c" to "(between (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (/, not_between) */ "a / b not between y and c" to "(not (between (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (/, like) */ "a / b like c" to "(like (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (/, not_like) */ "a / b not like c" to "(not (like (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (/, +) */ "a / b + c" to "(plus (divide (id a (case_insensitive) (unqualified)) (id b 
(case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, -) */ "a / b - c" to "(minus (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, ||) */ "a / b || c" to "(concat (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, *) */ "a / b * c" to "(times (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, %) */ "a / b % c" to "(modulo (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (/, is) */ "a / b is boolean" to "(is_type (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (/, is_not) */ "a / b is not boolean" to "(not (is_type (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (/, intersect) */ "a / b intersect c" to "(intersect (distinct) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, intersect_all) */ "a / b intersect all c" to "(intersect (all) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, except) */ "a / b except c" to "(except (distinct) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, except_all) */ "a / b except all c" to "(except (all) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, union) */ "a / b union c" to "(union (distinct) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, union_all) */ "a / b union all c" to "(union (all) (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, or) */ "a / b or c" to "(or (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, and) */ "a / b and c" to "(and (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, =) */ "a / b = c" to "(eq (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, <>) */ "a / b <> c" to "(ne (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, in) */ "a / b in c" to "(in_collection (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, not_in) */ "a / b not in c" to "(not (in_collection (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (/, <) */ "a / b < c" to "(lt (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, <=) */ "a / b <= c" to "(lte (divide (id a (case_insensitive) 
(unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, >) */ "a / b > c" to "(gt (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, >=) */ "a / b >= c" to "(gte (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, between) */ "a / b between w and c" to "(between (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (/, not_between) */ "a / b not between y and c" to "(not (between (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (/, like) */ "a / b like c" to "(like (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (/, not_like) */ "a / b not like c" to "(not (like (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (/, +) */ "a / b + c" to "(plus (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, -) */ "a / b - c" to "(minus (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, ||) */ "a / b || c" to "(concat (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, *) */ "a / b * c" to "(times (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, %) */ "a / b % c" to "(modulo (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (/, is) */ "a / b is boolean" to "(is_type (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (/, is_not) */ "a / b is not boolean" to "(not (is_type (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test @Parameters @TestCaseName("{0}") fun modPrecedence(pair: Pair) = runTest(pair) fun parametersForModPrecedence() = listOf( - /* (%, intersect) */ "a % b intersect c" to "(intersect (distinct) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, intersect_all) */ "a % b intersect all c" to "(intersect (all) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, except) */ "a % b except c" to "(except (distinct) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, except_all) */ "a % b except all c" to "(except (all) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, union) */ "a % b union c" to "(union (distinct) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) 
(unqualified)))", - /* (%, union_all) */ "a % b union all c" to "(union (all) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, or) */ "a % b or c" to "(or (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, and) */ "a % b and c" to "(and (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, =) */ "a % b = c" to "(eq (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, <>) */ "a % b <> c" to "(ne (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, in) */ "a % b in c" to "(in_collection (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, not_in) */ "a % b not in c" to "(not (in_collection (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", - /* (%, <) */ "a % b < c" to "(lt (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, <=) */ "a % b <= c" to "(lte (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, >) */ "a % b > c" to "(gt (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, >=) */ "a % b >= c" to "(gte (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, between) */ "a % b between w and c" to "(between (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", - /* (%, not_between) */ "a % b not between y and c" to "(not (between (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", - /* (%, like) */ "a % b like c" to "(like (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", - /* (%, not_like) */ "a % b not like c" to "(not (like (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", - /* (%, +) */ "a % b + c" to "(plus (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, -) */ "a % b - c" to "(minus (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, ||) */ "a % b || c" to "(concat (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, *) */ "a % b * c" to "(times (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, /) */ "a % b / c" to "(divide (modulo (id a (case_insensitive) (unqualified)) 
(id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", - /* (%, is) */ "a % b is boolean" to "(is_type (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", - /* (%, is_not) */ "a % b is not boolean" to "(not (is_type (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))") + /* (%, intersect) */ "a % b intersect c" to "(intersect (distinct) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, intersect_all) */ "a % b intersect all c" to "(intersect (all) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, except) */ "a % b except c" to "(except (distinct) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, except_all) */ "a % b except all c" to "(except (all) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, union) */ "a % b union c" to "(union (distinct) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, union_all) */ "a % b union all c" to "(union (all) (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, or) */ "a % b or c" to "(or (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, and) */ "a % b and c" to "(and (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, =) */ "a % b = c" to "(eq (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, <>) */ "a % b <> c" to "(ne (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, in) */ "a % b in c" to "(in_collection (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, not_in) */ "a % b not in c" to "(not (in_collection (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified))))", + /* (%, <) */ "a % b < c" to "(lt (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, <=) */ "a % b <= c" to "(lte (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, >) */ "a % b > c" to "(gt (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, >=) */ "a % b >= c" to "(gte (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, between) */ "a % b between w and c" to "(between (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id w (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))", + /* (%, 
not_between) */ "a % b not between y and c" to "(not (between (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id y (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (%, like) */ "a % b like c" to "(like (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null)", + /* (%, not_like) */ "a % b not like c" to "(not (like (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)) null))", + /* (%, +) */ "a % b + c" to "(plus (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, -) */ "a % b - c" to "(minus (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, ||) */ "a % b || c" to "(concat (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, *) */ "a % b * c" to "(times (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, /) */ "a % b / c" to "(divide (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (id c (case_insensitive) (unqualified)))", + /* (%, is) */ "a % b is boolean" to "(is_type (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type))", + /* (%, is_not) */ "a % b is not boolean" to "(not (is_type (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))) (boolean_type)))" + ) @Test fun combinationOfBinaryOperators() = runTest( @@ -964,41 +991,42 @@ class SqlParserPrecedenceTest : SqlParserTestBase() { ) ) (id h (case_insensitive) (unqualified)) - )""") + )""" + ) @Test @Parameters @TestCaseName("{0}") fun notUnaryPrecedence(pair: Pair) = runTest(pair) fun parametersForNotUnaryPrecedence() = listOf( - /* (not, intersect) */ "not a intersect b" to "(intersect (distinct) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, intersect_all) */ "not a intersect all b" to "(intersect (all) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, except) */ "not a except b" to "(except (distinct) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, except_all) */ "not a except all b" to "(except (all) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, union) */ "not a union b" to "(union (distinct) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, union_all) */ "not a union all b" to "(union (all) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, or) */ "not a or b" to "(or (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, and) */ "not a and b" to "(and (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", - /* (not, =) */ "not a = b" to "(not (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, <>) */ "not a <> b" to "(not (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) 
(unqualified))))", - /* (not, in) */ "not a in b" to "(not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, not_in) */ "not a not in b" to "(not (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))))", - /* (not, <) */ "not a < b" to "(not (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, <=) */ "not a <= b" to "(not (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, >) */ "not a > b" to "(not (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, >=) */ "not a >= b" to "(not (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, between) */ "not a between b and c" to "(not (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", + /* (not, intersect) */ "not a intersect b" to "(intersect (distinct) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, intersect_all) */ "not a intersect all b" to "(intersect (all) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, except) */ "not a except b" to "(except (distinct) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, except_all) */ "not a except all b" to "(except (all) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, union) */ "not a union b" to "(union (distinct) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, union_all) */ "not a union all b" to "(union (all) (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, or) */ "not a or b" to "(or (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, and) */ "not a and b" to "(and (not (id a (case_insensitive) (unqualified))) (id b (case_insensitive) (unqualified)))", + /* (not, =) */ "not a = b" to "(not (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, <>) */ "not a <> b" to "(not (ne (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, in) */ "not a in b" to "(not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, not_in) */ "not a not in b" to "(not (not (in_collection (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))))", + /* (not, <) */ "not a < b" to "(not (lt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, <=) */ "not a <= b" to "(not (lte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, >) */ "not a > b" to "(not (gt (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, >=) */ "not a >= b" to "(not (gte (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, between) */ "not a between b and c" to "(not (between (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified))))", /* (not, not_between) */ "not a not between b and c" to "(not (not (between (id a (case_insensitive) 
(unqualified)) (id b (case_insensitive) (unqualified)) (id c (case_insensitive) (unqualified)))))", - /* (not, like) */ "not a like b" to "(not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null))", - /* (not, not_like) */ "not a not like b" to "(not (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)))", - /* (not, +) */ "not a + b" to "(not (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, -) */ "not a - b" to "(not (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, ||) */ "not a || b" to "(not (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, *) */ "not a * b" to "(not (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, /) */ "not a / b" to "(not (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, %) */ "not a % b" to "(not (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", - /* (not, is) */ "not a is boolean" to "(not (is_type (id a (case_insensitive) (unqualified)) (boolean_type)))", - /* (not, is_not) */ "not a is not boolean" to "(not (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))))" + /* (not, like) */ "not a like b" to "(not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null))", + /* (not, not_like) */ "not a not like b" to "(not (not (like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) null)))", + /* (not, +) */ "not a + b" to "(not (plus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, -) */ "not a - b" to "(not (minus (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, ||) */ "not a || b" to "(not (concat (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, *) */ "not a * b" to "(not (times (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, /) */ "not a / b" to "(not (divide (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, %) */ "not a % b" to "(not (modulo (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))", + /* (not, is) */ "not a is boolean" to "(not (is_type (id a (case_insensitive) (unqualified)) (boolean_type)))", + /* (not, is_not) */ "not a is not boolean" to "(not (not (is_type (id a (case_insensitive) (unqualified)) (boolean_type))))" ) @Test @@ -1007,7 +1035,7 @@ class SqlParserPrecedenceTest : SqlParserTestBase() { fun notComboPrecedence(pair: Pair) = runTest(pair) fun parametersForNotComboPrecedence() = listOf( // not combination - "not a and b or c and not d or not e" to """ + "not a and b or c and not d or not e" to """ (or (or (and @@ -1038,9 +1066,8 @@ class SqlParserPrecedenceTest : SqlParserTestBase() { (id e (case_insensitive) (unqualified)) (pos (neg (pos (id foo (case_insensitive) (unqualified))))) ) - )""") - - + )""" + ) private fun runTest(pair: Pair) { val (source, expectedAst) = pair diff --git a/lang/test/org/partiql/lang/syntax/SqlParserTest.kt b/lang/test/org/partiql/lang/syntax/SqlParserTest.kt index 284c22505d..d019ed27f8 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserTest.kt +++ 
b/lang/test/org/partiql/lang/syntax/SqlParserTest.kt @@ -38,10 +38,9 @@ import kotlin.concurrent.thread */ class SqlParserTest : SqlParserTestBase() { - - //**************************************** + // **************************************** // literals - //**************************************** + // **************************************** @Test fun litInt() = assertExpression( "5", @@ -153,9 +152,9 @@ class SqlParserTest : SqlParserTestBase() { """(struct (expr_pair (lit "a") (struct)))""" ) - //**************************************** + // **************************************** // container constructors - //**************************************** + // **************************************** @Test fun rowValueConstructorWithSimpleExpressions() = assertExpression( "(1, 2, 3, 4)", @@ -180,9 +179,9 @@ class SqlParserTest : SqlParserTestBase() { """(bag (list (lit 1)) (list (lit 2)) (list (lit 3)))""" ) - //**************************************** + // **************************************** // identifiers - //**************************************** + // **************************************** @Test fun id_case_insensitive() = assertExpression( "kumo", @@ -197,9 +196,9 @@ class SqlParserTest : SqlParserTestBase() { "(id kumo (case_sensitive) (unqualified))" ) - //**************************************** + // **************************************** // call - //**************************************** + // **************************************** @Test fun callEmpty() = assertExpression( "foobar()", @@ -245,33 +244,36 @@ class SqlParserTest : SqlParserTestBase() { @Test fun callTrimSingleArgument() = assertExpression( "trim('test')", - "(call trim (lit \"test\"))") - - + "(call trim (lit \"test\"))" + ) @Test fun callTrimTwoArgumentsDefaultSpecification() = assertExpression( "trim(' ' from 'test')", - "(call trim (lit \" \") (lit \"test\"))") + "(call trim (lit \" \") (lit \"test\"))" + ) @Test fun callTrimTwoArgumentsUsingBoth() = assertExpression( "trim(both from 'test')", - "(call trim (lit both) (lit \"test\"))") + "(call trim (lit both) (lit \"test\"))" + ) @Test fun callTrimTwoArgumentsUsingLeading() = assertExpression( "trim(leading from 'test')", - "(call trim (lit leading) (lit \"test\"))") + "(call trim (lit leading) (lit \"test\"))" + ) @Test fun callTrimTwoArgumentsUsingTrailing() = assertExpression( "trim(trailing from 'test')", - "(call trim (lit trailing) (lit \"test\"))") + "(call trim (lit trailing) (lit \"test\"))" + ) - //**************************************** + // **************************************** // Unary operators - //**************************************** + // **************************************** @Test fun negCall() = assertExpression( @@ -319,9 +321,9 @@ class SqlParserTest : SqlParserTestBase() { "(not (lit 1))" ) - //**************************************** + // **************************************** // BETWEEN - //**************************************** + // **************************************** @Test fun betweenOperator() = assertExpression( "5 BETWEEN 1 AND 10", @@ -334,9 +336,9 @@ class SqlParserTest : SqlParserTestBase() { "(not_between (lit 5) (lit 1) (lit 10))", "(not (between (lit 5) (lit 1) (lit 10)))" ) - //**************************************** + // **************************************** // @ operator - //**************************************** + // **************************************** @Test fun atOperatorOnIdentifier() = assertExpression( @@ -352,9 +354,9 @@ class SqlParserTest : SqlParserTestBase() { """(path (id 
a (case_insensitive) (locals_first)) (path_expr (lit "b") (case_insensitive)))""" ) - //**************************************** + // **************************************** // IS operator - //**************************************** + // **************************************** @Test fun nullIsNull() = assertExpression( "null IS NULL", @@ -439,9 +441,9 @@ class SqlParserTest : SqlParserTestBase() { "(call_agg (distinct) count (id a (case_insensitive) (unqualified)))" ) - //**************************************** + // **************************************** // path expression - //**************************************** + // **************************************** @Test fun dot_case_1_insensitive_component() = assertExpression( "a.b", @@ -560,9 +562,9 @@ class SqlParserTest : SqlParserTestBase() { (path_expr (lit "b") (case_insensitive)))""".trimMargin() ) - //**************************************** + // **************************************** // cast - //**************************************** + // **************************************** @Test fun castNoArgs() = assertExpression( "CAST(5 AS VARCHAR)", @@ -627,9 +629,9 @@ class SqlParserTest : SqlParserTestBase() { "(cast (id a (case_insensitive) (unqualified)) (numeric_type 1 2))" ) - //**************************************** + // **************************************** // custom type cast - //**************************************** + // **************************************** @Test fun castAsEsBoolean() = assertExpression( "CAST(TRUE AS ES_BOOLEAN)", @@ -642,9 +644,9 @@ class SqlParserTest : SqlParserTestBase() { "(cast (lit 1.123) (custom_type rs_integer))" ) - //**************************************** + // **************************************** // searched CASE - //**************************************** + // **************************************** @Test fun searchedCaseSingleNoElse() = assertExpression( "CASE WHEN name = 'zoe' THEN 1 END", @@ -706,9 +708,9 @@ class SqlParserTest : SqlParserTestBase() { """ ) - //**************************************** + // **************************************** // simple CASE - //**************************************** + // **************************************** @Test fun simpleCaseSingleNoElse() = assertExpression( "CASE name WHEN 'zoe' THEN 1 END", @@ -782,9 +784,9 @@ class SqlParserTest : SqlParserTestBase() { """ ) - //**************************************** + // **************************************** // IN operator - //**************************************** + // **************************************** @Test fun inOperatorWithImplicitValues() = assertExpression( "a IN (1, 2, 3, 4)", @@ -830,11 +832,9 @@ class SqlParserTest : SqlParserTestBase() { """ ) - - - //**************************************** + // **************************************** // LIKE operator - //**************************************** + // **************************************** /* From SQL92 https://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt ::= [ NOT ] LIKE @@ -955,9 +955,9 @@ class SqlParserTest : SqlParserTestBase() { """(like (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)) (path (id x (case_insensitive) (unqualified)) (path_expr (lit "c") (case_insensitive))))""" ) - //**************************************** + // **************************************** // call date_add and date_diff (special syntax) - //**************************************** + // **************************************** private fun assertDateArithmetic( templateSql: String, @@ 
-976,8 +976,8 @@ class SqlParserTest : SqlParserTestBase() { assertExpression( templateSql.replace("", operation), templateExpectedV0.replace("", operation), - templateExpectedPartiqlAst.replace("", operation)) - + templateExpectedPartiqlAst.replace("", operation) + ) } @Test @@ -1036,10 +1036,9 @@ class SqlParserTest : SqlParserTestBase() { "(call date_ (lit timezone_minute) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))" ) - - //**************************************** + // **************************************** // call extract (special syntax) - //**************************************** + // **************************************** @Test fun callExtractYear() = assertExpression( "extract(year from a)", @@ -1106,11 +1105,12 @@ class SqlParserTest : SqlParserTestBase() { @Test fun parameterExpression() = assertExpression( "?", - "(parameter 1)") + "(parameter 1)" + ) - //**************************************** + // **************************************** // SELECT - //**************************************** + // **************************************** @Test fun selectWithSingleFrom() = assertExpression( "SELECT a FROM table1", @@ -1153,7 +1153,6 @@ class SqlParserTest : SqlParserTestBase() { "(select (project (project_list (project_all (path (id a (case_insensitive) (unqualified)) (path_expr (lit \"b\") (case_insensitive)))))) (from (scan (id table1 (case_insensitive) (unqualified)) t null null)))" ) - @Test fun selectWithFromAt() = assertExpression( "SELECT ord FROM table1 AT ord", @@ -1168,7 +1167,6 @@ class SqlParserTest : SqlParserTestBase() { "(select (project (project_list (project_expr (id ord (case_insensitive) (unqualified)) null) (project_expr (id val (case_insensitive) (unqualified)) null))) (from (scan (id table1 (case_insensitive) (unqualified)) val ord null)))" ) - @Test fun selectWithFromIdBy() = assertExpression( "SELECT * FROM table1 BY uid", @@ -1197,7 +1195,6 @@ class SqlParserTest : SqlParserTestBase() { "(select (project (project_star)) (from (scan (id table1 (case_insensitive) (unqualified)) val ord uid)))" ) - @Test fun selectWithFromUnpivot() = assertExpression( "SELECT * FROM UNPIVOT item", @@ -1382,7 +1379,6 @@ class SqlParserTest : SqlParserTestBase() { """ ) - @Test fun selectListWithAggregateWildcardCall() = assertExpression( "SELECT sum(a) + count(*), AVG(b), MIN(c), MAX(d + e) FROM foo", @@ -1676,13 +1672,13 @@ class SqlParserTest : SqlParserTestBase() { """ ) - //**************************************** + // **************************************** // ORDER BY - //**************************************** + // **************************************** @Test fun orderBySingleId() = assertExpression( - "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1", - """(select + "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1", + """(select (project (project_list (project_expr @@ -1706,8 +1702,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun orderByMultipleIds() = assertExpression( - "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1, rk2, rk3", - """(select + "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1, rk2, rk3", + """(select (project (project_list (project_expr @@ -1737,8 +1733,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun orderBySingleIdDESC() = assertExpression( - "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1 DESC", - """(select + "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1 DESC", + """(select (project (project_list (project_expr @@ -1762,8 +1758,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun 
orderByMultipleIdsWithOrderingSpec() = assertExpression( - "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1 ASC, rk2 DESC", - """(select + "SELECT a FROM tb WHERE hk = 1 ORDER BY rk1 ASC, rk2 DESC", + """(select (project (project_list (project_expr @@ -1787,9 +1783,9 @@ class SqlParserTest : SqlParserTestBase() { (desc))))) """ ) - //**************************************** + // **************************************** // GROUP BY and GROUP PARTIAL BY - //**************************************** + // **************************************** @Test fun groupBySingleId() = assertExpression( "SELECT a FROM data GROUP BY a", @@ -1868,9 +1864,9 @@ class SqlParserTest : SqlParserTestBase() { """ ) - //**************************************** + // **************************************** // HAVING - //**************************************** + // **************************************** @Test fun havingMinimal() = assertExpression( "SELECT a FROM data HAVING a = b", @@ -1934,9 +1930,9 @@ class SqlParserTest : SqlParserTestBase() { """ ) - //**************************************** + // **************************************** // PIVOT - //**************************************** + // **************************************** @Test fun pivotWithOnlyFrom() = assertExpression( "PIVOT v AT n FROM data", @@ -1983,9 +1979,9 @@ class SqlParserTest : SqlParserTestBase() { """ ) - //**************************************** + // **************************************** // DML - //**************************************** + // **************************************** @Test fun fromInsertValuesDml() = assertExpression( @@ -2097,8 +2093,8 @@ class SqlParserTest : SqlParserTestBase() { @Test @Ignore fun fromInsertValueReturningDml() = assertExpression( - "FROM x INSERT INTO foo VALUE 1 RETURNING ALL OLD foo", - """ + "FROM x INSERT INTO foo VALUE 1 RETURNING ALL OLD foo", + """ (dml (dml_op_list (insert_value @@ -2246,7 +2242,8 @@ class SqlParserTest : SqlParserTestBase() { ) ) ) - """) + """ + ) @Test fun insertValueAtReturningDml() = assertExpression( @@ -2261,7 +2258,8 @@ class SqlParserTest : SqlParserTestBase() { (returning_elem (all_old) (returning_column (id foo (case_insensitive) (unqualified))))))) - """) + """ + ) @Test fun insertValueAtMultiReturningTwoColsDml() = assertExpression( @@ -2276,12 +2274,13 @@ class SqlParserTest : SqlParserTestBase() { (returning_elem (all_old) (returning_column (id a (case_insensitive) (unqualified))))))) - """) + """ + ) @Test fun insertValueAtMultiReturningThreeColsDml() = assertExpression( - "INSERT INTO foo VALUE 1 AT bar RETURNING MODIFIED OLD bar, MODIFIED NEW bar, ALL NEW *", - """ + "INSERT INTO foo VALUE 1 AT bar RETURNING MODIFIED OLD bar, MODIFIED NEW bar, ALL NEW *", + """ (dml (operations (dml_op_list (insert_value (id foo (case_insensitive) (unqualified)) @@ -2297,12 +2296,13 @@ class SqlParserTest : SqlParserTestBase() { (returning_elem (all_new) (returning_wildcard))))) - """) + """ + ) @Test fun insertValueAtOnConflictDml() = assertExpression( - "INSERT INTO foo VALUE 1 AT bar ON CONFLICT WHERE a DO NOTHING", - """ + "INSERT INTO foo VALUE 1 AT bar ON CONFLICT WHERE a DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2316,7 +2316,8 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueAtOnConflictReturningDml() = assertExpression( @@ -2336,12 +2337,13 @@ class SqlParserTest : SqlParserTestBase() { (returning_elem (all_old) (returning_column (id foo (case_insensitive) (unqualified))))))) - """) + """ + ) 
@Test fun insertValueOnConflictDml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE bar DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE bar DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2355,12 +2357,13 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueOnConflictExpr1Dml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE hk=1 DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE hk=1 DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2374,12 +2377,13 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueOnConflictExpr2Dml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE hk=1 and rk=1 DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE hk=1 and rk=1 DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2393,12 +2397,13 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueOnConflictExpr3Dml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE hk BETWEEN 'a' and 'b' or rk = 'c' DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE hk BETWEEN 'a' and 'b' or rk = 'c' DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2412,12 +2417,13 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueOnConflictExpr4Dml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE not hk = 'a' DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE not hk = 'a' DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2431,12 +2437,13 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueOnConflictExpr5Dml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE attribute_exists(hk) DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE attribute_exists(hk) DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2450,12 +2457,13 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertValueOnConflictExpr6Dml() = assertExpression( - "INSERT INTO foo VALUE 1 ON CONFLICT WHERE not attribute_exists(hk) DO NOTHING", - """ + "INSERT INTO foo VALUE 1 ON CONFLICT WHERE not attribute_exists(hk) DO NOTHING", + """ (dml (operations (dml_op_list (insert_value @@ -2469,7 +2477,8 @@ class SqlParserTest : SqlParserTestBase() { ) )) ) - """) + """ + ) @Test fun insertQueryDml() = assertExpression( @@ -2505,8 +2514,8 @@ class SqlParserTest : SqlParserTestBase() { @Test @Ignore fun insertQueryReturningDml() = assertExpression( - "INSERT INTO foo SELECT y FROM bar RETURNING ALL NEW foo", - """ + "INSERT INTO foo SELECT y FROM bar RETURNING ALL NEW foo", + """ (dml (operations (dml_op_list @@ -2754,7 +2763,8 @@ class SqlParserTest : SqlParserTestBase() { (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified))))) - """) + """ + ) @Test fun legacyUpdateComplexDml() = assertExpression( @@ -2988,7 +2998,8 @@ class SqlParserTest : SqlParserTestBase() { (from (scan (id x (case_insensitive) (unqualified)) null null null)) (where (eq (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))) ) - """) + """ + ) @Test fun fromMultipleRemoveReturningDml() = assertExpression( @@ -3413,8 +3424,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun updateWhereReturningPathDml() = assertExpression( - "UPDATE x SET k = 5, m = 6 WHERE a = b 
RETURNING MODIFIED OLD a.b", - """(dml + "UPDATE x SET k = 5, m = 6 WHERE a = b RETURNING MODIFIED OLD a.b", + """(dml (operations (dml_op_list (set (assignment (id k (case_insensitive) (unqualified)) (lit 5))) @@ -3514,8 +3525,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun deleteReturningDml() = assertExpression( - "DELETE FROM y RETURNING MODIFIED NEW a", - """ + "DELETE FROM y RETURNING MODIFIED NEW a", + """ (dml (operations (dml_op_list (delete))) (from (scan (id y (case_insensitive) (unqualified)) null null null)) @@ -3697,7 +3708,7 @@ class SqlParserTest : SqlParserTestBase() { ) // DDL - //**************************************** + // **************************************** @Test fun createTable() = assertExpression( "CREATE TABLE foo", @@ -3789,8 +3800,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun unionSelectPrecedence() = assertExpression( - "SELECT * FROM foo UNION SELECT * FROM bar", - """ + "SELECT * FROM foo UNION SELECT * FROM bar", + """ (union (select (project @@ -3805,7 +3816,7 @@ class SqlParserTest : SqlParserTestBase() { (from (id bar case_insensitive)))) """, - """ + """ (union (distinct) (select @@ -3864,9 +3875,9 @@ class SqlParserTest : SqlParserTestBase() { "(intersect (all) (id a (case_insensitive) (unqualified)) (id b (case_insensitive) (unqualified)))" ) - //**************************************** + // **************************************** // semicolon at end of sqlUnderTest - //**************************************** + // **************************************** @Test fun semicolonAtEndOfQuery() = assertExpression( "SELECT * FROM <<1>>;", @@ -3905,9 +3916,9 @@ class SqlParserTest : SqlParserTestBase() { assertEquals(withoutSemicolon, withSemicolon) } - //**************************************** + // **************************************** // LET clause parsing - //**************************************** + // **************************************** private val projectX = PartiqlAst.build { projectList(projectExpr(id("x"))) } @@ -3949,7 +3960,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun selectFromLetFunctionWithLiteralsTest() = assertExpression( - "SELECT x FROM table1 LET foo(42, 'bar') AS A") { + "SELECT x FROM table1 LET foo(42, 'bar') AS A" + ) { select( project = projectX, from = scan(id("table1")), @@ -3959,7 +3971,8 @@ class SqlParserTest : SqlParserTestBase() { @Test fun selectFromLetFunctionWithVariablesTest() = assertExpression( - "SELECT x FROM table1 LET foo(table1) AS A") { + "SELECT x FROM table1 LET foo(table1) AS A" + ) { select( project = projectX, from = scan(id("table1")), @@ -3967,9 +3980,9 @@ class SqlParserTest : SqlParserTestBase() { ) } - //**************************************** + // **************************************** // OFFSET clause parsing - //**************************************** + // **************************************** private fun buildProject(project: String) = PartiqlAst.build { projectList(projectExpr(id(project))) } @@ -4016,53 +4029,64 @@ class SqlParserTest : SqlParserTestBase() { ) } - //**************************************** + // **************************************** // EXEC clause parsing - //**************************************** + // **************************************** @Test fun execNoArgs() = assertExpression( - "EXEC foo") { + "EXEC foo" + ) { exec("foo", emptyList()) } @Test fun execOneStringArg() = assertExpression( - "EXEC foo 'bar'") { + "EXEC foo 'bar'" + ) { exec("foo", listOf(lit(ionString("bar")))) } @Test fun execOneIntArg() = 
assertExpression( - "EXEC foo 1") { + "EXEC foo 1" + ) { exec("foo", listOf(lit(ionInt(1)))) } @Test fun execMultipleArg() = assertExpression( - "EXEC foo 'bar0', `1d0`, 2, [3]") { + "EXEC foo 'bar0', `1d0`, 2, [3]" + ) { exec("foo", listOf(lit(ionString("bar0")), lit(ionDecimal(Decimal.valueOf(1))), lit(ionInt(2)), list(lit(ionInt(3))))) } @Test fun execWithMissing() = assertExpression( - "EXEC foo MISSING") { + "EXEC foo MISSING" + ) { exec("foo", listOf(missing())) } @Test fun execWithBag() = assertExpression( - "EXEC foo <<1>>") { + "EXEC foo <<1>>" + ) { exec("foo", listOf(bag(lit(ionInt(1))))) } @Test fun execWithSelectQuery() = assertExpression( - "EXEC foo SELECT baz FROM bar") { - exec("foo", listOf( - select( - project = projectList(projectExpr(id("baz"))), - from = scan(id("bar")) - ))) + "EXEC foo SELECT baz FROM bar" + ) { + exec( + "foo", + listOf( + select( + project = projectList(projectExpr(id("baz"))), + from = scan(id("bar")) + ) + ) + ) } @Test @@ -4085,7 +4109,8 @@ class SqlParserTest : SqlParserTestBase() { not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not not false - """) + """ + ) } val maxParseTime: Long = 5000 t.join(maxParseTime) @@ -4093,6 +4118,7 @@ class SqlParserTest : SqlParserTestBase() { assertTrue( "parsing many nested unary nots should take less than $maxParseTime", - System.currentTimeMillis() - startTime < maxParseTime) + System.currentTimeMillis() - startTime < maxParseTime + ) } } diff --git a/lang/test/org/partiql/lang/syntax/SqlParserTestBase.kt b/lang/test/org/partiql/lang/syntax/SqlParserTestBase.kt index c76d501882..e434a96e2c 100644 --- a/lang/test/org/partiql/lang/syntax/SqlParserTestBase.kt +++ b/lang/test/org/partiql/lang/syntax/SqlParserTestBase.kt @@ -18,6 +18,7 @@ import com.amazon.ion.IonSexp import com.amazon.ionelement.api.SexpElement import com.amazon.ionelement.api.toIonElement import com.amazon.ionelement.api.toIonValue +import org.partiql.lang.CUSTOM_TEST_TYPES import org.partiql.lang.TestBase import org.partiql.lang.ast.AstDeserializerBuilder import org.partiql.lang.ast.AstSerializer @@ -26,7 +27,6 @@ import org.partiql.lang.ast.ExprNode import org.partiql.lang.ast.passes.MetaStrippingRewriter import org.partiql.lang.ast.toAstStatement import org.partiql.lang.ast.toExprNode -import org.partiql.lang.CUSTOM_TEST_TYPES import org.partiql.lang.checkErrorAndErrorContext import org.partiql.lang.domains.PartiqlAst import org.partiql.lang.errors.ErrorCode @@ -126,14 +126,15 @@ abstract class SqlParserTestBase : TestBase() { assertEquals( "actual ExprNodes must match deserialized s-exp $astVersion AST", actualExprNode.stripMetas(), - deserializedExprNodeFromSexp.stripMetas()) + deserializedExprNodeFromSexp.stripMetas() + ) } /** * Converts the given PartiqlAst.Statement into an IonElement. If the given [statement] is a query, extracts * just the expr component to be compatible with the SqlParser tests. 
*/ - private fun unwrapQuery(statement: PartiqlAst.Statement) : SexpElement { + private fun unwrapQuery(statement: PartiqlAst.Statement): SexpElement { return when (statement) { is PartiqlAst.Statement.Query -> statement.expr.toIonElement() is PartiqlAst.Statement.Dml, @@ -186,19 +187,19 @@ abstract class SqlParserTestBase : TestBase() { private fun loadIonSexp(expectedSexpAst: String) = ion.singleValue(expectedSexpAst).asIonSexp() private fun ExprNode.stripMetas() = MetaStrippingRewriter.stripMetas(this) - protected fun checkInputThrowingParserException(input: String, - errorCode: ErrorCode, - expectErrorContextValues: Map) { + protected fun checkInputThrowingParserException( + input: String, + errorCode: ErrorCode, + expectErrorContextValues: Map + ) { softAssert { try { parser.parseAstStatement(input) fail("Expected ParserException but there was no Exception") - } - catch (pex: ParserException) { + } catch (pex: ParserException) { checkErrorAndErrorContext(errorCode, pex, expectErrorContextValues) - } - catch (ex: Exception) { + } catch (ex: Exception) { fail("Expected ParserException but a different exception was thrown \n\t $ex") } } diff --git a/lang/test/org/partiql/lang/thread/EndlessTokenList.kt b/lang/test/org/partiql/lang/thread/EndlessTokenList.kt index 4c39140434..6bd8d860cc 100644 --- a/lang/test/org/partiql/lang/thread/EndlessTokenList.kt +++ b/lang/test/org/partiql/lang/thread/EndlessTokenList.kt @@ -18,7 +18,7 @@ internal class EndlessTokenList(val ion: IonSystem, val startIndex: Int = 0) : A override fun get(index: Int): Token { val span = SourceSpan(index / 80L, index % 80L, 1L) - return when((startIndex + index) % 2) { + return when ((startIndex + index) % 2) { 0 -> Token( type = TokenType.LITERAL, value = ion.newInt(startIndex + index), @@ -35,4 +35,4 @@ internal class EndlessTokenList(val ion: IonSystem, val startIndex: Int = 0) : A override fun subList(fromIndex: Int, toIndex: Int): List = EndlessTokenList(ion, startIndex + fromIndex) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/thread/ThreadInterruptedTests.kt b/lang/test/org/partiql/lang/thread/ThreadInterruptedTests.kt index 084c83df94..a9528c5ed5 100644 --- a/lang/test/org/partiql/lang/thread/ThreadInterruptedTests.kt +++ b/lang/test/org/partiql/lang/thread/ThreadInterruptedTests.kt @@ -30,7 +30,6 @@ import org.partiql.lang.syntax.SqlParser import java.util.concurrent.atomic.AtomicBoolean import kotlin.concurrent.thread - /** How long (in miilis) to wait after starting a thread to set the interrupted flag. */ const val INTERRUPT_AFTER_MS: Long = 100 @@ -52,7 +51,7 @@ class ThreadInterruptedTests { private val reallyBigNAry = makeBigExprNode(20000000) private val bigNAry = makeBigExprNode(10000000) private val bigPartiqlAst = makeBigPartiqlAstExpr(10000000) - + private fun makeBigSexpAst() = makeBigExprNode(1000000).let { nary -> @Suppress("DEPRECATION") @@ -189,7 +188,7 @@ class ThreadInterruptedTests { @Test fun compilerPipeline() { val numSteps = 10000000 - var accumulator= 0L + var accumulator = 0L val pipeline = CompilerPipeline.build(ion) { repeat(numSteps) { @@ -211,7 +210,7 @@ class ThreadInterruptedTests { // At this point, there's a remote possibility that accumulator has overflowed to zero and the assertion // below might fail. This guarantees that it will always pass. 
- if(accumulator == 0L) { + if (accumulator == 0L) { accumulator = 1L } diff --git a/lang/test/org/partiql/lang/types/StaticTypeTests.kt b/lang/test/org/partiql/lang/types/StaticTypeTests.kt index 5d32a8a3e0..827543ed0a 100644 --- a/lang/test/org/partiql/lang/types/StaticTypeTests.kt +++ b/lang/test/org/partiql/lang/types/StaticTypeTests.kt @@ -9,19 +9,19 @@ import org.partiql.lang.ION import org.partiql.lang.eval.EvaluationSession import org.partiql.lang.eval.ExprValue import org.partiql.lang.types.StaticType.Companion.BLOB -import org.partiql.lang.types.StaticType.Companion.NULL -import org.partiql.lang.types.StaticType.Companion.MISSING +import org.partiql.lang.types.StaticType.Companion.BOOL +import org.partiql.lang.types.StaticType.Companion.CLOB +import org.partiql.lang.types.StaticType.Companion.DECIMAL +import org.partiql.lang.types.StaticType.Companion.FLOAT import org.partiql.lang.types.StaticType.Companion.INT import org.partiql.lang.types.StaticType.Companion.INT2 import org.partiql.lang.types.StaticType.Companion.INT4 import org.partiql.lang.types.StaticType.Companion.INT8 -import org.partiql.lang.types.StaticType.Companion.FLOAT -import org.partiql.lang.types.StaticType.Companion.DECIMAL -import org.partiql.lang.types.StaticType.Companion.TIMESTAMP -import org.partiql.lang.types.StaticType.Companion.BOOL -import org.partiql.lang.types.StaticType.Companion.CLOB +import org.partiql.lang.types.StaticType.Companion.MISSING +import org.partiql.lang.types.StaticType.Companion.NULL import org.partiql.lang.types.StaticType.Companion.STRING import org.partiql.lang.types.StaticType.Companion.SYMBOL +import org.partiql.lang.types.StaticType.Companion.TIMESTAMP import org.partiql.lang.util.ArgumentsProviderBase import java.math.BigInteger @@ -71,11 +71,11 @@ class StaticTypeTests { data class TestCase( val sqlValue: String, val staticType: StaticType, - val expectedIsInstanceResult: Boolean) + val expectedIsInstanceResult: Boolean + ) fun eval(sql: String): ExprValue = - CompilerPipeline.standard(ION).compile(sql).eval(EvaluationSession.standard()) - + CompilerPipeline.standard(ION).compile(sql).eval(EvaluationSession.standard()) @ParameterizedTest @ArgumentsSource(ScalarIsInstanceArguments::class) @@ -85,8 +85,10 @@ class StaticTypeTests { eval(tc.sqlValue) } - assertEquals(tc.expectedIsInstanceResult, tc.staticType.isInstance(exprValue), - "The result of StaticType.isInstance() should match the expected value for type ${tc.staticType} and \"${tc.sqlValue}\"") + assertEquals( + tc.expectedIsInstanceResult, tc.staticType.isInstance(exprValue), + "The result of StaticType.isInstance() should match the expected value for type ${tc.staticType} and \"${tc.sqlValue}\"" + ) } class ScalarIsInstanceArguments : ArgumentsProviderBase() { @@ -99,7 +101,6 @@ class StaticTypeTests { } } - @ParameterizedTest @ArgumentsSource(SequenceIsInstanceArguments::class) fun sequenceIsInstanceArgumentsTest(tc: TestCase) { @@ -108,8 +109,10 @@ class StaticTypeTests { eval(tc.sqlValue) } - assertEquals(tc.expectedIsInstanceResult, tc.staticType.isInstance(exprValue), - "The result of StaticType.isInstance() should match the expected value for type ${tc.staticType} and \"${tc.sqlValue}\"") + assertEquals( + tc.expectedIsInstanceResult, tc.staticType.isInstance(exprValue), + "The result of StaticType.isInstance() should match the expected value for type ${tc.staticType} and \"${tc.sqlValue}\"" + ) } class SequenceIsInstanceArguments : ArgumentsProviderBase() { @@ -191,8 +194,10 @@ class StaticTypeTests { 
eval(tc.sqlValue) } - assertEquals(tc.expectedIsInstanceResult, tc.staticType.isInstance(exprValue), - "The result of StaticType.isInstance() should match the expected value for type ${tc.staticType} and \"${tc.sqlValue}\"") + assertEquals( + tc.expectedIsInstanceResult, tc.staticType.isInstance(exprValue), + "The result of StaticType.isInstance() should match the expected value for type ${tc.staticType} and \"${tc.sqlValue}\"" + ) } class StructIsInstanceArguments : ArgumentsProviderBase() { @@ -211,73 +216,85 @@ class StaticTypeTests { TestCase( sqlValue = "{'foo': ${scalarInput1.sqlValue} }", staticType = closedContentStructType, - expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType)), + expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType) + ), // open content where all fields match TestCase( sqlValue = "{'foo': ${scalarInput1.sqlValue}, 'openContent': 'isAllowed' }", staticType = openContentStructType, - expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType)), + expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType) + ), // closed content with missing required field TestCase( sqlValue = "{ }", staticType = closedContentStructType, - expectedIsInstanceResult = false), + expectedIsInstanceResult = false + ), // closed content with non-matching field TestCase( sqlValue = "{'bar': ${scalarInput1.sqlValue} }", staticType = closedContentStructType, - expectedIsInstanceResult = false), + expectedIsInstanceResult = false + ), // closed content with optional field that is present TestCase( sqlValue = "{'foo': ${scalarInput1.sqlValue} }", staticType = closedContentWithOptionalField, - expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType)), + expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType) + ), // closed content with optional field that is not present TestCase( sqlValue = "{ }", staticType = closedContentWithOptionalField, - expectedIsInstanceResult = true), + expectedIsInstanceResult = true + ), // open content with missing required field TestCase( sqlValue = "{ 'openContent': 'isAllowed' }", staticType = openContentStructType, - expectedIsInstanceResult = false), + expectedIsInstanceResult = false + ), // open content with a non-matching field TestCase( sqlValue = "{'bar': ${scalarInput1.sqlValue}, 'openContent': 'isAllowed' }", staticType = openContentStructType, - expectedIsInstanceResult = false), + expectedIsInstanceResult = false + ), // open content with an non-matching field TestCase( sqlValue = "{'bar': ${scalarInput1.sqlValue}, 'openContent': 'isNotAllowed' }", staticType = openContentStructType, - expectedIsInstanceResult = false), + expectedIsInstanceResult = false + ), // duplicate struct fields with values of the same type TestCase( sqlValue = "{'foo': ${scalarInput1.sqlValue}, 'foo': ${scalarInput1.sqlValue} }", staticType = closedContentStructType, - expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType))), - // Duplicate struct fields with values of different types. - // We generate one test case for every scalar value with every other scalar value and - // every type. - SCALARS.filterNot { it.sqlValue == "MISSING" }.map { scalarInput2 -> - TestCase( - sqlValue = "{'foo': ${scalarInput1.sqlValue}, 'foo': ${scalarInput2.sqlValue} }", - staticType = closedContentStructType, - // We expect the result to be `TRUE` only if the values of the foo and bar fields - // match the static type. 
- expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType) && scalarInput2.expectedTypes.contains(staticType)) - - }).flatten() + expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType) + ) + ), + // Duplicate struct fields with values of different types. + // We generate one test case for every scalar value with every other scalar value and + // every type. + SCALARS.filterNot { it.sqlValue == "MISSING" }.map { scalarInput2 -> + TestCase( + sqlValue = "{'foo': ${scalarInput1.sqlValue}, 'foo': ${scalarInput2.sqlValue} }", + staticType = closedContentStructType, + // We expect the result to be `TRUE` only if the values of the foo and bar fields + // match the static type. + expectedIsInstanceResult = scalarInput1.expectedTypes.contains(staticType) && scalarInput2.expectedTypes.contains(staticType) + ) + } + ).flatten() } } ).flatten() diff --git a/lang/test/org/partiql/lang/util/AssertJExtensions.kt b/lang/test/org/partiql/lang/util/AssertJExtensions.kt index faa7773f50..1258ec4ef5 100644 --- a/lang/test/org/partiql/lang/util/AssertJExtensions.kt +++ b/lang/test/org/partiql/lang/util/AssertJExtensions.kt @@ -16,4 +16,4 @@ package org.partiql.lang.util import org.assertj.core.api.SoftAssertions -internal fun softAssert(assertions: SoftAssertions.() -> Unit) = SoftAssertions().apply(assertions).assertAll() \ No newline at end of file +internal fun softAssert(assertions: SoftAssertions.() -> Unit) = SoftAssertions().apply(assertions).assertAll() diff --git a/lang/test/org/partiql/lang/util/AssertionHelpers.kt b/lang/test/org/partiql/lang/util/AssertionHelpers.kt index 53776bf561..051fd468dc 100644 --- a/lang/test/org/partiql/lang/util/AssertionHelpers.kt +++ b/lang/test/org/partiql/lang/util/AssertionHelpers.kt @@ -16,7 +16,7 @@ fun assertIonEquals(expectedResult: IonValue, actualResult: IonValue, message: S message?.let { print(it) } println() - println("expected: ${expectedResult}") + println("expected: $expectedResult") println("actual : ${actualResult}\n") println("expected (pretty):\n${expectedResult.toPrettyString().trim()}") diff --git a/lang/test/org/partiql/lang/util/AstExtensions.kt b/lang/test/org/partiql/lang/util/AstExtensions.kt index d3460bb8b0..093b3d97f4 100644 --- a/lang/test/org/partiql/lang/util/AstExtensions.kt +++ b/lang/test/org/partiql/lang/util/AstExtensions.kt @@ -32,7 +32,7 @@ internal fun IonSexp.mixIdentifierCase(): IonSexp { fun IonSymbol.cloneMixingCase(): IonSymbol { val mixedCase = stringValue()!!.foldIndexed("") { index, acc, c -> acc + when (index % 2 == 0) { - true -> c.toUpperCase() + true -> c.toUpperCase() false -> c.toLowerCase() } } @@ -41,14 +41,14 @@ internal fun IonSexp.mixIdentifierCase(): IonSexp { } fun IonSexp.isId(): Boolean = size == 2 && - this[0] is IonSymbol && - this[0].stringValue() == "id" + this[0] is IonSymbol && + this[0].stringValue() == "id" fun IonSexp.copyRewritingNodes(): IonSexp = foldIndexed(ion.newEmptySexp()) { index, newNode, element -> val rewritten = when { element is IonSymbol && this.isId() && index == 1 -> element.cloneMixingCase() - element is IonSexp -> element.copyRewritingNodes() - else -> element.clone() + element is IonSexp -> element.copyRewritingNodes() + else -> element.clone() } newNode.add(rewritten) diff --git a/lang/test/org/partiql/lang/util/BindingsExtensions.kt b/lang/test/org/partiql/lang/util/BindingsExtensions.kt index b502e44767..f57f2a80a7 100644 --- a/lang/test/org/partiql/lang/util/BindingsExtensions.kt +++ b/lang/test/org/partiql/lang/util/BindingsExtensions.kt 
@@ -5,7 +5,6 @@ import org.partiql.lang.eval.Bindings import org.partiql.lang.eval.ExprValue import org.partiql.lang.types.StaticType - /** * Derives a [Bindings] from a [Bindings]. * @@ -21,5 +20,3 @@ fun Bindings.toTypedBindings() = this.let { valuedBindings -> } } } - - diff --git a/lang/test/org/partiql/lang/util/CollectionsListTests.kt b/lang/test/org/partiql/lang/util/CollectionsListTests.kt index 78e0f3d65e..e0ff182cbe 100644 --- a/lang/test/org/partiql/lang/util/CollectionsListTests.kt +++ b/lang/test/org/partiql/lang/util/CollectionsListTests.kt @@ -33,9 +33,9 @@ import org.partiql.lang.TestBase class CollectionsListTests : TestBase() { - val isEven = { x: Int -> (x % 2) == 0} - var empty :List = listOf() + val isEven = { x: Int -> (x % 2) == 0 } + var empty: List = listOf() @Test fun forAllEmptyList() = assertTrue(empty.forAll(isEven)) @Test fun forAllTrue() = assertTrue(listOf(2, 4, 6).forAll(isEven)) @Test fun forAllFalse() = assertFalse(listOf(2, 3, 6).forAll(isEven)) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/util/CompileOptionsExtensions.kt b/lang/test/org/partiql/lang/util/CompileOptionsExtensions.kt index de8e3853ec..a85fc91853 100644 --- a/lang/test/org/partiql/lang/util/CompileOptionsExtensions.kt +++ b/lang/test/org/partiql/lang/util/CompileOptionsExtensions.kt @@ -4,7 +4,6 @@ import org.partiql.lang.eval.CompileOptions import org.partiql.lang.eval.TypedOpBehavior import org.partiql.lang.eval.TypingMode - fun CompileOptions.Builder.legacyCastBehavior() { typedOpBehavior(TypedOpBehavior.LEGACY) } diff --git a/lang/test/org/partiql/lang/util/ConfigurableExprValueFormatterTest.kt b/lang/test/org/partiql/lang/util/ConfigurableExprValueFormatterTest.kt index 0bd1f9ff1e..bfae5f8dc8 100644 --- a/lang/test/org/partiql/lang/util/ConfigurableExprValueFormatterTest.kt +++ b/lang/test/org/partiql/lang/util/ConfigurableExprValueFormatterTest.kt @@ -13,7 +13,6 @@ import org.partiql.lang.syntax.SqlParser import kotlin.test.assertEquals import kotlin.test.assertTrue - @RunWith(JUnitParamsRunner::class) class ConfigurableExprValueFormatterTest { @@ -38,7 +37,8 @@ class ConfigurableExprValueFormatterTest { "MISSING" to "MISSING", "null" to "NULL", "NULL" to "NULL", - "`null`" to "NULL").map { listOf(it.first, it.second) } + "`null`" to "NULL" + ).map { listOf(it.first, it.second) } private fun baseExamples() = arrayOf( // Bool @@ -74,7 +74,8 @@ class ConfigurableExprValueFormatterTest { // TODO: This should succeed. // Check https://github.com/partiql/partiql-lang-kotlin/issues/386. 
// "DATE '2021-02-28'" to "DATE '2021-02-28'", - "{}" to "{}").map { listOf(it.first, it.second) } + "{}" to "{}" + ).map { listOf(it.first, it.second) } fun prettyExamples() = baseExamples() + arrayOf( // List @@ -109,7 +110,6 @@ class ConfigurableExprValueFormatterTest { | 3 |>>""".trimMargin(), - "<<1,2,<<1>> >>" to """ |<< | 1, @@ -158,7 +158,7 @@ class ConfigurableExprValueFormatterTest { | } |>> """.trimMargin() - ).map { listOf(it.first, it.second) } + ).map { listOf(it.first, it.second) } fun standardExamples() = baseExamples() + arrayOf( // List @@ -175,7 +175,7 @@ class ConfigurableExprValueFormatterTest { // Mixed containers "<<{'foo': 1, 'bar': [1, 2, 3], 'baz': {'books': <<>>}}>>" - ).map { listOf(it, it) } + ).map { listOf(it, it) } private fun assertFormatter(expression: String, expected: String, formatter: ConfigurableExprValueFormatter) { val value = evalQuery(expression) @@ -197,13 +197,13 @@ class ConfigurableExprValueFormatterTest { @Test @Parameters(method = "unknownExamples") - fun testPrettyUnknown(expression: String, expected: String) - = assertFormatterForUnknown(expression, expected, pretty) + fun testPrettyUnknown(expression: String, expected: String) = + assertFormatterForUnknown(expression, expected, pretty) @Test @Parameters(method = "unknownExamples") - fun testStandardUnknown(expression: String, expected: String) - = assertFormatterForUnknown(expression, expected, standard) + fun testStandardUnknown(expression: String, expected: String) = + assertFormatterForUnknown(expression, expected, standard) @Test @Parameters(method = "prettyExamples") diff --git a/lang/test/org/partiql/lang/util/CrossMap.kt b/lang/test/org/partiql/lang/util/CrossMap.kt index 87e27efd57..dfdfc2569b 100644 --- a/lang/test/org/partiql/lang/util/CrossMap.kt +++ b/lang/test/org/partiql/lang/util/CrossMap.kt @@ -16,4 +16,4 @@ fun crossMap(l1: List, l2: List, l3: List, block: (T block(v1, v2, v3) } }.flatten() - }.flatten() \ No newline at end of file + }.flatten() diff --git a/lang/test/org/partiql/lang/util/ErrorContextHelpers.kt b/lang/test/org/partiql/lang/util/ErrorContextHelpers.kt index c4fdadd6e2..c338db3ca9 100644 --- a/lang/test/org/partiql/lang/util/ErrorContextHelpers.kt +++ b/lang/test/org/partiql/lang/util/ErrorContextHelpers.kt @@ -2,8 +2,6 @@ package org.partiql.lang.util import org.partiql.lang.errors.Property - /** Returns a Map with the specified line & column number. 
*/ internal fun sourceLocationProperties(lineNum: Long, colNum: Long): Map = mapOf(Property.LINE_NUMBER to lineNum, Property.COLUMN_NUMBER to colNum) - diff --git a/lang/test/org/partiql/lang/util/ExprValueFactoryExtensions.kt b/lang/test/org/partiql/lang/util/ExprValueFactoryExtensions.kt index 6264e6ca6e..52480c9a24 100644 --- a/lang/test/org/partiql/lang/util/ExprValueFactoryExtensions.kt +++ b/lang/test/org/partiql/lang/util/ExprValueFactoryExtensions.kt @@ -18,4 +18,4 @@ import org.partiql.lang.eval.ExprValue import org.partiql.lang.eval.ExprValueFactory internal fun ExprValueFactory.newFromIonText(ionText: String): ExprValue = - this.newFromIonValue(this.ion.singleValue(ionText)) \ No newline at end of file + this.newFromIonValue(this.ion.singleValue(ionText)) diff --git a/lang/test/org/partiql/lang/util/LongExtensionsTest.kt b/lang/test/org/partiql/lang/util/LongExtensionsTest.kt index 072438796f..e6ae58f5af 100644 --- a/lang/test/org/partiql/lang/util/LongExtensionsTest.kt +++ b/lang/test/org/partiql/lang/util/LongExtensionsTest.kt @@ -34,4 +34,4 @@ class LongExtensionsTest { val value = assertDoesNotThrow { 0L.toIntExact() } assertEquals(0, value) } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/util/NumbersTest.kt b/lang/test/org/partiql/lang/util/NumbersTest.kt index f1f0cbab6d..0385126672 100644 --- a/lang/test/org/partiql/lang/util/NumbersTest.kt +++ b/lang/test/org/partiql/lang/util/NumbersTest.kt @@ -42,7 +42,7 @@ class NumbersTest : TestBase() { @Test fun coerceBigDecimalLong() = assertCoerce(dec("1.1"), 2L, dec("1.1"), dec("2")) @Test fun coerceBigDecimalDouble() = assertCoerce(dec("1.1"), 2.0, dec("1.1"), dec(2.0)) - @Test fun unaryMinusLong() = assertEquals(-1L,-(1L as Number)) + @Test fun unaryMinusLong() = assertEquals(-1L, -(1L as Number)) @Test fun unaryMinusDouble() = assertEquals(-1.0, -(1.0 as Number)) @Test fun unaryMinusBigDecimal() = assertEquals(dec("-100.1"), -(dec("100.1") as Number)) diff --git a/lang/test/org/partiql/lang/util/SchemaHelpers.kt b/lang/test/org/partiql/lang/util/SchemaHelpers.kt index 2d9ba462a2..c1ced7a3d4 100644 --- a/lang/test/org/partiql/lang/util/SchemaHelpers.kt +++ b/lang/test/org/partiql/lang/util/SchemaHelpers.kt @@ -4,7 +4,6 @@ import com.amazon.ion.IonSystem import com.amazon.ionschema.IonSchemaSystemBuilder import org.partiql.lang.partiqlisl.getResourceAuthority - fun createPartiqlIonSchemaSystem(ion: IonSystem) = IonSchemaSystemBuilder.standard() .addAuthority(getResourceAuthority(ion)) .withIonSystem(ion) diff --git a/lang/test/org/partiql/lang/util/SexpAstPrettyPrinter.kt b/lang/test/org/partiql/lang/util/SexpAstPrettyPrinter.kt index 94fe3c5829..ea9228b7ee 100644 --- a/lang/test/org/partiql/lang/util/SexpAstPrettyPrinter.kt +++ b/lang/test/org/partiql/lang/util/SexpAstPrettyPrinter.kt @@ -55,18 +55,17 @@ class SexpAstPrettyPrinter(val builder: StringBuilder = StringBuilder()) { } private fun append(node: IonValue) { - when(node) { + when (node) { is IonSexp -> { builder.append('(') val tag = node.firstOrNull() - if(tag != null) { + if (tag != null) { val tagSymbol = tag as? 
IonSymbol if (tagSymbol == null) { builder.append(tag.toString()) builder.append(" /* <-- NOTE: first position of s-exp was not a symbol */") - } - else { + } else { builder.append(tagSymbol.stringValue()) if (dontIndent.contains(tagSymbol.stringValue())) { @@ -74,8 +73,7 @@ class SexpAstPrettyPrinter(val builder: StringBuilder = StringBuilder()) { builder.append(' ') append(it) } - } - else { + } else { if (node.size > 1) { nestLevel++ @@ -96,5 +94,4 @@ class SexpAstPrettyPrinter(val builder: StringBuilder = StringBuilder()) { } } } - -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/util/testdsl/ExprNodeTestCase.kt b/lang/test/org/partiql/lang/util/testdsl/ExprNodeTestCase.kt index 9262e7feae..db082a44cc 100644 --- a/lang/test/org/partiql/lang/util/testdsl/ExprNodeTestCase.kt +++ b/lang/test/org/partiql/lang/util/testdsl/ExprNodeTestCase.kt @@ -10,11 +10,11 @@ data class ExprNodeTestCase(val name: String, val expr: ExprNode) { override fun toString(): String = "$name - $expr" fun assertEquals(actual: ExprNode) { - if(expr != actual) { + if (expr != actual) { println("Failing test case \"${name}\"") - println("expected: ${expr}") + println("expected: $expr") println("actual : $actual") fail("Unexpected ExprNode AST for test: '$name', see console") } } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/util/testdsl/GroupBuilder.kt b/lang/test/org/partiql/lang/util/testdsl/GroupBuilder.kt index 6a60a9f599..c4149aaa16 100644 --- a/lang/test/org/partiql/lang/util/testdsl/GroupBuilder.kt +++ b/lang/test/org/partiql/lang/util/testdsl/GroupBuilder.kt @@ -33,9 +33,10 @@ class GroupBuilderImpl(private val groupName: String) : GroupBuilder { sqlUnderTest = sql, expectedIonResult = expected, compileOptions = compileOptions, - extraAssertions = extraAssertions)) - + extraAssertions = extraAssertions + ) + ) } fun build() = IonResultTestGroup(groupName, tests) -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/util/testdsl/IonResultTestCase.kt b/lang/test/org/partiql/lang/util/testdsl/IonResultTestCase.kt index 8ff0df9a5c..6228861a89 100644 --- a/lang/test/org/partiql/lang/util/testdsl/IonResultTestCase.kt +++ b/lang/test/org/partiql/lang/util/testdsl/IonResultTestCase.kt @@ -60,7 +60,6 @@ data class IonResultTestCase( assertDoesNotThrow("IonResultTestCase ${toString()} should not throw when parsing") { ExprNodeTestCase(name, SqlParser(ION).parseExprNode(sqlUnderTest)) } - } internal fun IonResultTestCase.runTestCase( @@ -83,12 +82,11 @@ internal fun IonResultTestCase.runTestCase( expectedIonResult?.let { ION.singleValue(it) } } - val modifiedCompileOptions = when(compileOptionsBlock) { + val modifiedCompileOptions = when (compileOptionsBlock) { null -> compileOptions else -> CompileOptions.build { compileOptionsBlock() } } - val pipeline = CompilerPipeline.build(ION) pipelineBlock@{ compileOptions(modifiedCompileOptions) pipelineBlock?.invoke(this) @@ -128,4 +126,4 @@ internal fun IonResultTestCase.runTestCase( } } } -} \ No newline at end of file +} diff --git a/lang/test/org/partiql/lang/util/testdsl/IonResultTestGroup.kt b/lang/test/org/partiql/lang/util/testdsl/IonResultTestGroup.kt index 202e67b905..bce17cb24f 100644 --- a/lang/test/org/partiql/lang/util/testdsl/IonResultTestGroup.kt +++ b/lang/test/org/partiql/lang/util/testdsl/IonResultTestGroup.kt @@ -1,4 +1,4 @@ package org.partiql.lang.util.testdsl /** Defines a group of related tests. 
*/ -data class IonResultTestGroup(val name: String, val tests: List) \ No newline at end of file +data class IonResultTestGroup(val name: String, val tests: List) diff --git a/lang/test/org/partiql/lang/util/testdsl/IonResultTestSuite.kt b/lang/test/org/partiql/lang/util/testdsl/IonResultTestSuite.kt index 895d2feef3..9894f717b9 100644 --- a/lang/test/org/partiql/lang/util/testdsl/IonResultTestSuite.kt +++ b/lang/test/org/partiql/lang/util/testdsl/IonResultTestSuite.kt @@ -41,8 +41,8 @@ data class IonResultTestSuite( ): List { val allTests = groups.flatMap { category -> category.tests.map { it.copy(group = category.name) } } // find any names in the skip list that are not actual tests (to help keep the skip list clean and sane) - val invalidFailListNames = (failingTestNames.filter { failEntry -> allTests.none { it.name == failEntry }}) - if(invalidFailListNames.any()) { + val invalidFailListNames = (failingTestNames.filter { failEntry -> allTests.none { it.name == failEntry } }) + if (invalidFailListNames.any()) { println("The following failing test names entries do not match the name of any test:") invalidFailListNames.forEach { println("\"$it\"") @@ -50,7 +50,6 @@ data class IonResultTestSuite( fail("invalid failing test names found, see console") } - return allTests.map { it.copy(expectFailure = failingTestNames.contains(it.name)) } @@ -68,7 +67,6 @@ data class IonResultTestSuite( /** Instantiates an instance of [MockDb] with values and types instantiated using [globals]. */ fun mockDb(valueFactory: ExprValueFactory): MockDb = MockDb(globals, valueFactory, createPartiqlIonSchemaSystem(ION)) - } /** @@ -84,5 +82,3 @@ data class IonResultTestSuite( */ internal fun defineTestSuite(block: SuiteBuilder.() -> Unit): IonResultTestSuite = SuiteBuilderImpl().apply(block).build() - - diff --git a/lang/test/org/partiql/lang/util/testdsl/SuiteBuilder.kt b/lang/test/org/partiql/lang/util/testdsl/SuiteBuilder.kt index 2ed33ab436..78fb38aca9 100644 --- a/lang/test/org/partiql/lang/util/testdsl/SuiteBuilder.kt +++ b/lang/test/org/partiql/lang/util/testdsl/SuiteBuilder.kt @@ -27,10 +27,12 @@ class SuiteBuilderImpl : SuiteBuilder { } override infix fun String.hasVal(ionText: String) = - globals.put(this, - assertDoesNotThrow("Parsing global variable '${this}' should not throw") { + globals.put( + this, + assertDoesNotThrow("Parsing global variable '$this' should not throw") { ION.singleValue(ionText) - }) + } + ) override fun parameterFactory(block: (ExprValueFactory) -> List) { factoryBlock = block diff --git a/lang/test/org/partiql/lang/util/testdsl/TestDslMarker.kt b/lang/test/org/partiql/lang/util/testdsl/TestDslMarker.kt index 0c4616cbdb..3e0d8a26aa 100644 --- a/lang/test/org/partiql/lang/util/testdsl/TestDslMarker.kt +++ b/lang/test/org/partiql/lang/util/testdsl/TestDslMarker.kt @@ -6,4 +6,4 @@ package org.partiql.lang.util.testdsl * See Kotlin's documentation regarding [type safe builders](https://kotlinlang.org/docs/reference/type-safe-builders.html) */ @DslMarker -annotation class TestDslMarker \ No newline at end of file +annotation class TestDslMarker diff --git a/pts/test/org/partiql/lang/pts/PtsTest.kt b/pts/test/org/partiql/lang/pts/PtsTest.kt index af338dbc62..fa12cdb9eb 100644 --- a/pts/test/org/partiql/lang/pts/PtsTest.kt +++ b/pts/test/org/partiql/lang/pts/PtsTest.kt @@ -14,5 +14,4 @@ class PtsTest : Junit4PtsTest() { override fun getEvaluator(): Evaluator = PartiQlPtsEvaluator(PtsEquality.getDefault()) override fun getPtsFilePaths() = listOf("../testscript/pts/test-scripts") - -} \ No 
newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/PtsException.kt b/testscript/src/org/partiql/testscript/PtsException.kt index dd4bf876c9..e48c5cae4e 100644 --- a/testscript/src/org/partiql/testscript/PtsException.kt +++ b/testscript/src/org/partiql/testscript/PtsException.kt @@ -4,8 +4,8 @@ import org.partiql.testscript.parser.ScriptLocation import java.lang.Exception private val ptsErrorComparator = Comparator.comparing { it.scriptLocation.inputName } - .thenBy { it.scriptLocation.lineNum } - .thenBy { it.message } + .thenBy { it.scriptLocation.lineNum } + .thenBy { it.message } data class PtsError(val scriptLocation: ScriptLocation, val message: String) : Comparable { override fun compareTo(other: PtsError): Int = ptsErrorComparator.compare(this, other) @@ -16,5 +16,3 @@ data class PtsError(val scriptLocation: ScriptLocation, val message: String) : C abstract class PtsException(val errors: List = listOf(), exception: Exception? = null) : RuntimeException(exception) { protected val formattedErrors = errors.sorted().joinToString("\n") { " $it" } } - - diff --git a/testscript/src/org/partiql/testscript/Result.kt b/testscript/src/org/partiql/testscript/Result.kt index 6f6f203867..d006bb0b7f 100644 --- a/testscript/src/org/partiql/testscript/Result.kt +++ b/testscript/src/org/partiql/testscript/Result.kt @@ -3,7 +3,7 @@ package org.partiql.testscript import org.partiql.testscript.parser.ScriptLocation /** - * Specialized either type to make it easier to accumulate failures when processing test scripts + * Specialized either type to make it easier to accumulate failures when processing test scripts */ internal sealed class Result @@ -24,23 +24,21 @@ internal abstract class TestScriptError { } /** - * Folds an iterable of results into a single [Result]. When there is at least a single failure it merges all + * Folds an iterable of results into a single [Result]. When there is at least a single failure it merges all * errors into a single Failure otherwise it uses `block` to aggregate the success values - * - * @param block lambda executed to group all successes into a single result. + * + * @param block lambda executed to group all successes into a single result. 
*/ internal fun Iterable>.foldToResult(block: (List>) -> Result): Result { - + val (successes: List>, failures: List>) = this.partition { it is Success } - return if(failures.isEmpty()) { + return if (failures.isEmpty()) { block(successes.filterIsInstance>()) - } - else { + } else { Failure(failures.filterIsInstance>().flatMap { it.errors }) } - } internal fun Iterable>.foldToResult(): Result> { return this.foldToResult { successes -> Success(successes.map { it.value }) } -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/compiler/CompilerErrors.kt b/testscript/src/org/partiql/testscript/compiler/CompilerErrors.kt index 2aaa74bff1..b25ad6f322 100644 --- a/testscript/src/org/partiql/testscript/compiler/CompilerErrors.kt +++ b/testscript/src/org/partiql/testscript/compiler/CompilerErrors.kt @@ -5,31 +5,35 @@ import org.partiql.testscript.TestScriptError import org.partiql.testscript.parser.ScriptLocation internal sealed class CompilerErrors( - override val errorMessage: String, - override val scriptLocation: ScriptLocation) : TestScriptError() + override val errorMessage: String, + override val scriptLocation: ScriptLocation +) : TestScriptError() internal class TestIdNotUniqueError( - testId: String, - scriptLocation: ScriptLocation, - otherScriptLocation: ScriptLocation) - : CompilerErrors("testId: $testId not unique also found in: $otherScriptLocation", scriptLocation) + testId: String, + scriptLocation: ScriptLocation, + otherScriptLocation: ScriptLocation +) : + CompilerErrors("testId: $testId not unique also found in: $otherScriptLocation", scriptLocation) internal class NoTestMatchForAppendTestError( - pattern: String, - scriptLocation: ScriptLocation) - : CompilerErrors("No testId matched the pattern: $pattern", scriptLocation) + pattern: String, + scriptLocation: ScriptLocation +) : + CompilerErrors("No testId matched the pattern: $pattern", scriptLocation) internal class AppendingAppendedTestError( - testId: String, - otherLocation: ScriptLocation, - scriptLocation: ScriptLocation) - : CompilerErrors("testId: $testId was already appended on $otherLocation", scriptLocation) + testId: String, + otherLocation: ScriptLocation, + scriptLocation: ScriptLocation +) : + CompilerErrors("testId: $testId was already appended on $otherLocation", scriptLocation) -internal class FileSetDefaultEnvironmentNotSingleValue(path: String, scriptLocation: ScriptLocation) - : CompilerErrors("Environment file $path is not a single value", scriptLocation) +internal class FileSetDefaultEnvironmentNotSingleValue(path: String, scriptLocation: ScriptLocation) : + CompilerErrors("Environment file $path is not a single value", scriptLocation) -internal class FileSetDefaultEnvironmentNotExists(path: String, scriptLocation: ScriptLocation) - : CompilerErrors("Environment file $path does not exist", scriptLocation) +internal class FileSetDefaultEnvironmentNotExists(path: String, scriptLocation: ScriptLocation) : + CompilerErrors("Environment file $path does not exist", scriptLocation) -internal class FileSetDefaultEnvironmentNotStruct(path: String, actualType: IonType, scriptLocation: ScriptLocation) - : CompilerErrors("Environment file $path does not contain a STRUCT but a $actualType", scriptLocation) \ No newline at end of file +internal class FileSetDefaultEnvironmentNotStruct(path: String, actualType: IonType, scriptLocation: ScriptLocation) : + CompilerErrors("Environment file $path does not contain a STRUCT but a $actualType", scriptLocation) diff --git 
a/testscript/src/org/partiql/testscript/compiler/CompilerException.kt b/testscript/src/org/partiql/testscript/compiler/CompilerException.kt index 0da2ec7574..0e4c2dca93 100644 --- a/testscript/src/org/partiql/testscript/compiler/CompilerException.kt +++ b/testscript/src/org/partiql/testscript/compiler/CompilerException.kt @@ -6,4 +6,3 @@ import org.partiql.testscript.PtsException class CompilerException(errors: List) : PtsException(errors) { override val message: String = "Errors found when compiling test scripts:\n$formattedErrors" } - diff --git a/testscript/src/org/partiql/testscript/evaluator/TestResult.kt b/testscript/src/org/partiql/testscript/evaluator/TestResult.kt index e046601646..5672c66733 100644 --- a/testscript/src/org/partiql/testscript/evaluator/TestResult.kt +++ b/testscript/src/org/partiql/testscript/evaluator/TestResult.kt @@ -23,9 +23,10 @@ data class TestResultSuccess(override val test: TestScriptExpression) : TestResu * @param reason failure reason */ data class TestFailure( - override val test: TestScriptExpression, - val actualResult: String, - val reason: FailureReason) : TestResult() { + override val test: TestScriptExpression, + val actualResult: String, + val reason: FailureReason +) : TestResult() { enum class FailureReason { /** Evaluation threw an error when none was expected */ diff --git a/testscript/src/org/partiql/testscript/extensions/FileExtensions.kt b/testscript/src/org/partiql/testscript/extensions/FileExtensions.kt index f1a46595cc..047728dac1 100644 --- a/testscript/src/org/partiql/testscript/extensions/FileExtensions.kt +++ b/testscript/src/org/partiql/testscript/extensions/FileExtensions.kt @@ -15,4 +15,4 @@ internal fun File.listRecursive(filter: FileFilter = FileFilter { true }): List< this.isDirectory -> this.listFiles(filter).flatMap { f -> f.listRecursive(filter) } this.isFile -> listOf(this) else -> throw IllegalArgumentException("couldn't read '${this.path}'. 
It's neither a file nor a directory") -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/extensions/IonExtensions.kt b/testscript/src/org/partiql/testscript/extensions/IonExtensions.kt index ef9f5cf267..bfe4e8c0bb 100644 --- a/testscript/src/org/partiql/testscript/extensions/IonExtensions.kt +++ b/testscript/src/org/partiql/testscript/extensions/IonExtensions.kt @@ -3,8 +3,8 @@ package org.partiql.testscript.extensions import com.amazon.ion.IonValue internal fun IonValue.toIonText(): String { - val sb = StringBuilder() + val sb = StringBuilder() this.system.newTextWriter(sb).use { w -> this.writeTo(w) } - + return sb.toString() -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/parser/Ion.kt b/testscript/src/org/partiql/testscript/parser/Ion.kt index 9052230f6c..cf1d61a50c 100644 --- a/testscript/src/org/partiql/testscript/parser/Ion.kt +++ b/testscript/src/org/partiql/testscript/parser/Ion.kt @@ -9,9 +9,11 @@ internal data class IonValueWithLocation(val ionValue: IonValue, val scriptLocat /** * A decorated IonReader specialized for the [Parser] */ -internal class IonInputReader(val inputName: String, - private val ion: IonSystem, - private val reader: IonReader) : IonReader by reader { +internal class IonInputReader( + val inputName: String, + private val ion: IonSystem, + private val reader: IonReader +) : IonReader by reader { /** * SpanProvider does not provide accurate line numbers for structs, see https://github.com/amzn/ion-java/issues/226 @@ -39,10 +41,10 @@ internal class IonInputReader(val inputName: String, fun ionValueWithLocation(): IonValueWithLocation { val location = currentScriptLocation() - + return IonValueWithLocation(ion.newValue(reader), location) } - + fun stepIn(block: (Sequence) -> Unit) { this.stepIn() block(this.asSequence()) @@ -50,28 +52,28 @@ internal class IonInputReader(val inputName: String, } fun asSequence(): Sequence = Sequence { - object: Iterator { + object : Iterator { var nextCalled = false var hasNext = false - + private fun handleHasNext() { - if(!nextCalled) { + if (!nextCalled) { hasNext = this@IonInputReader.next() != null nextCalled = true } } - + override fun next(): IonInputReader { handleHasNext() nextCalled = false - + return this@IonInputReader } override fun hasNext(): Boolean { handleHasNext() - + return hasNext } } diff --git a/testscript/src/org/partiql/testscript/parser/NamedInputStream.kt b/testscript/src/org/partiql/testscript/parser/NamedInputStream.kt index ac2f683148..d9f17ae916 100644 --- a/testscript/src/org/partiql/testscript/parser/NamedInputStream.kt +++ b/testscript/src/org/partiql/testscript/parser/NamedInputStream.kt @@ -3,8 +3,8 @@ package org.partiql.testscript.parser import java.io.InputStream /** - * A named [InputStream] + * A named [InputStream] */ -class NamedInputStream(val name: String, private val inputStream: InputStream) : InputStream(){ +class NamedInputStream(val name: String, private val inputStream: InputStream) : InputStream() { override fun read(): Int = inputStream.read() -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/parser/ParserError.kt b/testscript/src/org/partiql/testscript/parser/ParserError.kt index d22ebbccde..ac8db9b040 100644 --- a/testscript/src/org/partiql/testscript/parser/ParserError.kt +++ b/testscript/src/org/partiql/testscript/parser/ParserError.kt @@ -1,60 +1,61 @@ package org.partiql.testscript.parser import com.amazon.ion.IonType -import org.partiql.testscript.PtsError import 
org.partiql.testscript.TestScriptError internal sealed class ParserError( - override val errorMessage: String, - override val scriptLocation: ScriptLocation) : TestScriptError() + override val errorMessage: String, + override val scriptLocation: ScriptLocation +) : TestScriptError() internal class EmptyError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError("Field must have at least one element: $valuePath", scriptLocation) - + ParserError("Field must have at least one element: $valuePath", scriptLocation) internal class InvalidNumberOfAnnotationsError(numberOfAnnotations: Int, scriptLocation: ScriptLocation) : - ParserError("Wrong number of annotations. Expected 1 got: $numberOfAnnotations", scriptLocation) + ParserError("Wrong number of annotations. Expected 1 got: $numberOfAnnotations", scriptLocation) internal class UnknownFunctionError(functionName: String, scriptLocation: ScriptLocation) : - ParserError("Unknown PTS function: $functionName", scriptLocation) + ParserError("Unknown PTS function: $functionName", scriptLocation) internal class DuplicatedFieldError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError("DuplicatedField: $valuePath", scriptLocation) - + ParserError("DuplicatedField: $valuePath", scriptLocation) internal class MissingRequiredError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError("Missing required field: $valuePath", scriptLocation) + ParserError("Missing required field: $valuePath", scriptLocation) internal class UnexpectedIonTypeError( + valuePath: String, + expected: List, + actual: IonType, + scriptLocation: ScriptLocation +) : + ParserError("Wrong type for $valuePath. Expected $expected, got $actual", scriptLocation) { + + constructor( valuePath: String, - expected: List, + expected: IonType, actual: IonType, - scriptLocation: ScriptLocation) : - ParserError("Wrong type for $valuePath. Expected $expected, got $actual", scriptLocation) { - - constructor(valuePath: String, - expected: IonType, - actual: IonType, - scriptLocation: ScriptLocation) : this(valuePath, listOf(expected), actual, scriptLocation) + scriptLocation: ScriptLocation + ) : this(valuePath, listOf(expected), actual, scriptLocation) } internal class InvalidTemplateValueError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError( - "Invalid template value for field: $valuePath. Must start with '$' when it's a SYMBOL", - scriptLocation) + ParserError( + "Invalid template value for field: $valuePath. Must start with '$' when it's a SYMBOL", + scriptLocation + ) internal class UnexpectedFieldError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError("Unexpected field: $valuePath", scriptLocation) + ParserError("Unexpected field: $valuePath", scriptLocation) internal class MissingTemplateVariableError(variable: String, scriptLocation: ScriptLocation) : - ParserError("Missing template variable: $variable", scriptLocation) + ParserError("Missing template variable: $variable", scriptLocation) internal class InvalidExpectedTagError(valuePath: String, tag: String, scriptLocation: ScriptLocation) : - ParserError("Invalid $valuePath tag, must be either 'success' or 'error' got '$tag'", scriptLocation) + ParserError("Invalid $valuePath tag, must be either 'success' or 'error' got '$tag'", scriptLocation) internal class InvalidExpectedErrorSizeError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError("$valuePath error can only have a single element, e.g. 
(error)", scriptLocation) + ParserError("$valuePath error can only have a single element, e.g. (error)", scriptLocation) internal class InvalidExpectedSuccessSizeError(valuePath: String, scriptLocation: ScriptLocation) : - ParserError("$valuePath success must have two elements, e.g. (success (bag {a: 1}))", scriptLocation) - + ParserError("$valuePath success must have two elements, e.g. (success (bag {a: 1}))", scriptLocation) diff --git a/testscript/src/org/partiql/testscript/parser/ParserException.kt b/testscript/src/org/partiql/testscript/parser/ParserException.kt index 5d08dced8c..508757bd2a 100644 --- a/testscript/src/org/partiql/testscript/parser/ParserException.kt +++ b/testscript/src/org/partiql/testscript/parser/ParserException.kt @@ -10,4 +10,4 @@ class ParserException(errors: List) : PtsException(errors) { class ParserIonException(filePath: String, e: IonException) : PtsException(exception = e) { override val message: String = "IonException on file $filePath: ${e.message}" -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/parser/ScriptLocation.kt b/testscript/src/org/partiql/testscript/parser/ScriptLocation.kt index 30c54f5a51..a41e68fe76 100644 --- a/testscript/src/org/partiql/testscript/parser/ScriptLocation.kt +++ b/testscript/src/org/partiql/testscript/parser/ScriptLocation.kt @@ -4,4 +4,4 @@ data class ScriptLocation(val inputName: String, val lineNum: Long) { override fun toString(): String { return "$inputName:$lineNum" } -} \ No newline at end of file +} diff --git a/testscript/src/org/partiql/testscript/parser/ast/AstNode.kt b/testscript/src/org/partiql/testscript/parser/ast/AstNode.kt index 7e7cb708d0..ec51ce6037 100644 --- a/testscript/src/org/partiql/testscript/parser/ast/AstNode.kt +++ b/testscript/src/org/partiql/testscript/parser/ast/AstNode.kt @@ -10,23 +10,31 @@ sealed class AstNode { data class ModuleNode(val nodes: List, override val scriptLocation: ScriptLocation) : AstNode() -data class TestNode(val id: String, - val description: String?, - val statement: String, - val environment: IonStruct?, - val expected: IonSexp, - override val scriptLocation: ScriptLocation) : AstNode() +data class TestNode( + val id: String, + val description: String?, + val statement: String, + val environment: IonStruct?, + val expected: IonSexp, + override val scriptLocation: ScriptLocation +) : AstNode() sealed class SetDefaultEnvironmentNode : AstNode() -data class FileSetDefaultEnvironmentNode(val environmentRelativeFilePath: String, - override val scriptLocation: ScriptLocation) : SetDefaultEnvironmentNode() +data class FileSetDefaultEnvironmentNode( + val environmentRelativeFilePath: String, + override val scriptLocation: ScriptLocation +) : SetDefaultEnvironmentNode() -data class InlineSetDefaultEnvironmentNode(val environment: IonStruct, - override val scriptLocation: ScriptLocation) : SetDefaultEnvironmentNode() +data class InlineSetDefaultEnvironmentNode( + val environment: IonStruct, + override val scriptLocation: ScriptLocation +) : SetDefaultEnvironmentNode() data class SkipListNode(val patterns: List, override val scriptLocation: ScriptLocation) : AstNode() -data class AppendTestNode(val pattern: String, - val additionalData: IonStruct, - override val scriptLocation: ScriptLocation) : AstNode() \ No newline at end of file +data class AppendTestNode( + val pattern: String, + val additionalData: IonStruct, + override val scriptLocation: ScriptLocation +) : AstNode() diff --git 
a/testscript/src/org/partiql/testscript/parser/ast/MacroNodes.kt b/testscript/src/org/partiql/testscript/parser/ast/MacroNodes.kt index 2422342d95..93b29512e0 100644 --- a/testscript/src/org/partiql/testscript/parser/ast/MacroNodes.kt +++ b/testscript/src/org/partiql/testscript/parser/ast/MacroNodes.kt @@ -6,9 +6,11 @@ import org.partiql.testscript.parser.ScriptLocation internal data class VariableSet(val variables: IonStruct, val scriptLocation: ScriptLocation) -internal data class TestTemplate(val id: String, - val description: IonValue?, - val statement: IonValue, - val environment: IonValue?, - val expected: IonValue, - val scriptLocation: ScriptLocation) +internal data class TestTemplate( + val id: String, + val description: IonValue?, + val statement: IonValue, + val environment: IonValue?, + val expected: IonValue, + val scriptLocation: ScriptLocation +) diff --git a/testscript/src/org/partiql/testscript/parser/ast/builders/AppendTestBuilder.kt b/testscript/src/org/partiql/testscript/parser/ast/builders/AppendTestBuilder.kt index 688cbbcb67..783ff4f6e3 100644 --- a/testscript/src/org/partiql/testscript/parser/ast/builders/AppendTestBuilder.kt +++ b/testscript/src/org/partiql/testscript/parser/ast/builders/AppendTestBuilder.kt @@ -7,7 +7,6 @@ import org.partiql.testscript.Failure import org.partiql.testscript.Result import org.partiql.testscript.Success import org.partiql.testscript.parser.ScriptLocation -import org.partiql.testscript.parser.UnexpectedFieldError import org.partiql.testscript.parser.ast.AppendTestNode internal class AppendTestBuilder(location: ScriptLocation) : StructBuilder("append_test", location) { @@ -25,12 +24,15 @@ internal class AppendTestBuilder(location: ScriptLocation) : StructBuilder) { val inputs = File("integration-test2/test-scripts") - .listRecursive(ptsFileFilter) - .map { file -> NamedInputStream(file.absolutePath, FileInputStream(file)) } + .listRecursive(ptsFileFilter) + .map { file -> NamedInputStream(file.absolutePath, FileInputStream(file)) } val ast = ptsParser.parse(inputs) ptsCompiler.compile(ast) diff --git a/testscript/test/org/partiql/testscript/compiler/CompilerTest.kt b/testscript/test/org/partiql/testscript/compiler/CompilerTest.kt index 56b2cc64a1..ae7c91946c 100644 --- a/testscript/test/org/partiql/testscript/compiler/CompilerTest.kt +++ b/testscript/test/org/partiql/testscript/compiler/CompilerTest.kt @@ -16,13 +16,13 @@ class CompilerTest { * We use `#` instead of `$` in test fixtures because escaping `$` in a kotlin * multiline string is messy, e.g. 
`"""${"$"}"""` results in `"$"` */ - + private val ion = IonSystemBuilder.standard().build() private val emptyStruct = ion.newEmptyStruct().apply { makeReadOnly() } private val parser = Parser(ion) private val compiler = Compiler(ion) - + private fun String.toStruct() = ion.singleValue(this) as IonStruct private fun assertCompile(vararg scripts: String, expected: List) { @@ -47,25 +47,29 @@ class CompilerTest { } @Test - fun singleTest() = assertCompile(""" + fun singleTest() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM <<1,2,3>>", | expected: (success 1) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = ion.newEmptyStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = ion.newEmptyStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun multipleTests() = assertCompile(""" + fun multipleTests() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM <<1,2,3>>", @@ -79,25 +83,29 @@ class CompilerTest { | expected: (success 2) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = ion.newEmptyStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)), - TestExpression( - id = "test2", - description = "second test", - statement = "SELECT * FROM {}", - environment = ion.newEmptyStruct(), - expected = ExpectedSuccess(ion.newInt(2)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = ion.newEmptyStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ), + TestExpression( + id = "test2", + description = "second test", + statement = "SELECT * FROM {}", + environment = ion.newEmptyStruct(), + expected = ExpectedSuccess(ion.newInt(2)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) + ) + ) + ) @Test - fun multipleTestsWithSameId() = assertCompileError(""" + fun multipleTestsWithSameId() = assertCompileError( + """ |test::{ | id: test1, | statement: "SELECT * FROM {}", @@ -110,13 +118,15 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when compiling test scripts: | $inputBasePath/input[0].sqlts:7 - testId: test1 not unique also found in: $inputBasePath/input[0].sqlts:1 - """.trimMargin()) + """.trimMargin() + ) @Test - fun singleTestWithEnvironment() = assertCompile(""" + fun singleTestWithEnvironment() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM <<1,2,3>>", @@ -124,18 +134,21 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = "{a: 12}".toStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) - ))) + 
expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = "{a: 12}".toStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun setDefaultThenTest() = assertCompile(""" + fun setDefaultThenTest() = assertCompile( + """ |set_default_environment::{a: 12} | |test::{ @@ -144,18 +157,21 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = "{a: 12}".toStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = "{a: 12}".toStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) + ) + ) + ) @Test - fun setDefaultFromFileTest() = assertCompile(""" + fun setDefaultFromFileTest() = assertCompile( + """ |set_default_environment::"environment.ion" | |test::{ @@ -164,42 +180,54 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = "{b: 99}".toStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = "{b: 99}".toStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) + ) + ) + ) @Test - fun setDefaultFromFileNotExistsTest() = assertCompileError(""" + fun setDefaultFromFileNotExistsTest() = assertCompileError( + """ |set_default_environment::"dont_exist.ion" - """.trimMargin(), """ + """.trimMargin(), + """ |Errors found when compiling test scripts: | $inputBasePath/input[0].sqlts:1 - Environment file $inputBasePath/dont_exist.ion does not exist - """.trimMargin()) + """.trimMargin() + ) @Test - fun setDefaultFromFileMultipleValuesTest() = assertCompileError(""" + fun setDefaultFromFileMultipleValuesTest() = assertCompileError( + """ |set_default_environment::"multiple_values.ion" - """.trimMargin(), """ + """.trimMargin(), + """ |Errors found when compiling test scripts: | $inputBasePath/input[0].sqlts:1 - Environment file $inputBasePath/multiple_values.ion is not a single value - """.trimMargin()) + """.trimMargin() + ) @Test - fun setDefaultFromFileNotStructTest() = assertCompileError(""" + fun setDefaultFromFileNotStructTest() = assertCompileError( + """ |set_default_environment::"not_struct.ion" - """.trimMargin(), """ + """.trimMargin(), + """ |Errors found when compiling test scripts: | $inputBasePath/input[0].sqlts:1 - Environment file $inputBasePath/not_struct.ion does not contain a STRUCT but a STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun setDefaultOnlyAffectsSubsequentTests() = assertCompile(""" + fun setDefaultOnlyAffectsSubsequentTests() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM <<1,2,3>>", @@ -214,26 +242,29 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = 
null, - statement = "SELECT * FROM <<1,2,3>>", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) - ), - TestExpression( - id = "test2", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = "{a: 12}".toStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 9) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ), + TestExpression( + id = "test2", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = "{a: 12}".toStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 9) + ) + ) + ) @Test - fun setDefaultResetsForNextModule() = assertCompile(""" + fun setDefaultResetsForNextModule() = assertCompile( + """ |set_default_environment::{a: 12} | |test::{ @@ -243,7 +274,7 @@ class CompilerTest { |} """.trimMargin(), - """ + """ |test::{ | id: test2, | statement: "SELECT * FROM <<1,2,3>>", @@ -251,26 +282,29 @@ class CompilerTest { |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = "{a: 12}".toStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) - ), - TestExpression( - id = "test2", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[1].sqlts", 1) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = "{a: 12}".toStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) + ), + TestExpression( + id = "test2", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[1].sqlts", 1) + ) + ) + ) @Test - fun testEnvironmentWinsOverDefault() = assertCompile(""" + fun testEnvironmentWinsOverDefault() = assertCompile( + """ |set_default_environment::{a: 12} | |test::{ @@ -280,18 +314,21 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = "{foo: 20}".toStruct(), - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) - ))) + expected = listOf( + TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = "{foo: 20}".toStruct(), + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) + ) + ) + ) @Test - fun skipList() = assertCompile(""" + fun skipList() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM <<1,2,3>>", @@ -300,21 +337,25 @@ class CompilerTest { | |skip_list::["test1"] """.trimMargin(), - expected = listOf( - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = 
"test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) - ), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7)))) + expected = listOf( + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) + ) + ) + ) @Test - fun skipListMultipleTestsSkipSingle() = assertCompile(""" + fun skipListMultipleTestsSkipSingle() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM <<1,2,3>>", @@ -329,29 +370,34 @@ class CompilerTest { | |skip_list::["test1"] """.trimMargin(), - expected = listOf( - TestExpression( - id = "test2", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7)), - - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM <<1,2,3>>", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) - ), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13)))) + expected = listOf( + TestExpression( + id = "test2", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) + ), + + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM <<1,2,3>>", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13) + ) + ) + ) @Test - fun skipListMultipleTestsSkipAll() = assertCompile(""" + fun skipListMultipleTestsSkipAll() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM 1", @@ -366,32 +412,38 @@ class CompilerTest { | |skip_list::[".*"] """.trimMargin(), - expected = listOf( - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13)), - - SkippedTestExpression( - id = "test2", - original = TestExpression( - id = "test2", - description = null, - statement = "SELECT * FROM 2", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(2)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) - ), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13)))) + expected = listOf( + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = 
ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13) + ), + + SkippedTestExpression( + id = "test2", + original = TestExpression( + id = "test2", + description = null, + statement = "SELECT * FROM 2", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(2)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13) + ) + ) + ) @Test - fun skipListBeforeTest() = assertCompile(""" + fun skipListBeforeTest() = assertCompile( + """ |skip_list::[".*"] | |test::{ @@ -400,20 +452,25 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) + expected = listOf( + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun skipListSameTestMultipleTimes() = assertCompile(""" + fun skipListSameTestMultipleTimes() = assertCompile( + """ |skip_list::[".*", "test1", "test.*"] | |test::{ @@ -422,21 +479,25 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) - + expected = listOf( + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 3) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun appendTest() = assertCompile(""" + fun appendTest() = assertCompile( + """ |test::{ | id: test1, | statement: "SELECT * FROM 1", @@ -448,21 +509,26 @@ class CompilerTest { | additional_data: { foo: bar } |} """.trimMargin(), - expected = listOf( - AppendedTestExpression( - id = "test1", - additionalData = "{ foo: bar }".toStruct(), - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7)))) + expected = listOf( + AppendedTestExpression( + id = "test1", + additionalData = "{ foo: bar }".toStruct(), + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 
1) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 7) + ) + ) + ) @Test - fun appendTestBeforeTest() = assertCompile(""" + fun appendTestBeforeTest() = assertCompile( + """ |append_test::{ | pattern: "test1", | additional_data: { foo: bar } @@ -474,21 +540,26 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - AppendedTestExpression( - id = "test1", - additionalData = "{ foo: bar }".toStruct(), - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 6)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) + expected = listOf( + AppendedTestExpression( + id = "test1", + additionalData = "{ foo: bar }".toStruct(), + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 6) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun appendAndSkippedTest() = assertCompile(""" + fun appendAndSkippedTest() = assertCompile( + """ |skip_list::["test1"] | |append_test::{ @@ -502,20 +573,25 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 8)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) + expected = listOf( + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 8) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun appendSkippedTestMultipleTimes() = assertCompile(""" + fun appendSkippedTestMultipleTimes() = assertCompile( + """ |skip_list::["test1"] | |append_test::{ @@ -534,32 +610,39 @@ class CompilerTest { | expected: (success 1) |} """.trimMargin(), - expected = listOf( - SkippedTestExpression( - id = "test1", - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) + expected = listOf( + SkippedTestExpression( + id = "test1", + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 13) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun appendTestNoTestMatches() = assertCompileError(""" + fun appendTestNoTestMatches() = assertCompileError( + """ |append_test::{ | pattern: "test1", | additional_data: { foo: bar } |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when 
compiling test scripts: | $inputBasePath/input[0].sqlts:1 - No testId matched the pattern: test1 - """.trimMargin()) + """.trimMargin() + ) @Test - fun appendTestMoreThanOneTestMatches() = assertCompile(""" + fun appendTestMoreThanOneTestMatches() = assertCompile( + """ |append_test::{ | pattern: ".*", | additional_data: { foo: bar } @@ -577,32 +660,39 @@ class CompilerTest { | expected: (success 2) |} """.trimMargin(), - expected = listOf( - AppendedTestExpression( - id = "test1", - additionalData = "{ foo: bar }".toStruct(), - original = TestExpression( - id = "test1", - description = null, - statement = "SELECT * FROM 1", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(1)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 6)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)), - AppendedTestExpression( - id = "test2", - additionalData = "{ foo: bar }".toStruct(), - original = TestExpression( - id = "test2", - description = null, - statement = "SELECT * FROM 2", - environment = emptyStruct, - expected = ExpectedSuccess(ion.newInt(2)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 12)), - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1)))) + expected = listOf( + AppendedTestExpression( + id = "test1", + additionalData = "{ foo: bar }".toStruct(), + original = TestExpression( + id = "test1", + description = null, + statement = "SELECT * FROM 1", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(1)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 6) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ), + AppendedTestExpression( + id = "test2", + additionalData = "{ foo: bar }".toStruct(), + original = TestExpression( + id = "test2", + description = null, + statement = "SELECT * FROM 2", + environment = emptyStruct, + expected = ExpectedSuccess(ion.newInt(2)), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 12) + ), + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 1) + ) + ) + ) @Test - fun appendAppendedTest() = assertCompileError(""" + fun appendAppendedTest() = assertCompileError( + """ |test::{ | id: test1, | statement: "SELECT * FROM 1", @@ -619,8 +709,9 @@ class CompilerTest { | additional_data: { foo: baz } |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when compiling test scripts: | $inputBasePath/input[0].sqlts:12 - testId: test1 was already appended on $inputBasePath/input[0].sqlts:7 - """.trimMargin()) -} \ No newline at end of file + """.trimMargin() + ) +} diff --git a/testscript/test/org/partiql/testscript/evaluator/DefaultPtsEqualityTest.kt b/testscript/test/org/partiql/testscript/evaluator/DefaultPtsEqualityTest.kt index d50023c492..6d7dda2ea0 100644 --- a/testscript/test/org/partiql/testscript/evaluator/DefaultPtsEqualityTest.kt +++ b/testscript/test/org/partiql/testscript/evaluator/DefaultPtsEqualityTest.kt @@ -41,83 +41,83 @@ internal class DefaultPtsEqualityTest { companion object { // values are grouped by equivalent values private val values = listOf( - // unknown - listOf("missing::null"), - listOf("null"), - - // boolean - listOf("true"), - listOf("false"), - - // int - listOf("1"), - listOf("2"), - - // float - listOf("nan"), - listOf("-inf", "-1e1000", "-1.000000000000e1000"), - listOf("1e0", "1e00"), - listOf("-5e-1", "-0.5e0"), - listOf("+inf", "1e1000", "1.000000000000e1000"), - - // decimal - listOf("1.0", "1.00"), - 
listOf("-0.0", "-0.0000000000", "0d10000"), - listOf("-1d1000", "-1.000000000000d1000"), - - // timestamp - listOf("2019T"), - listOf( - "2017T", - "2017-01T", - "2017-01-01T", - "2017-01-01T00:00-00:00", - "2017-01-01T01:00+01:00" - ), - - // symbol - listOf("aSymbol"), - - // string - listOf("\"a string\""), - - // clob - listOf("{{ \"This is a CLOB of text.\" }}"), - - // blob - listOf("{{ dHdvIHBhZGRpbmcgY2hhcmFjdGVycw== }}"), - - // list - listOf("[]"), - listOf("[[]]"), - listOf("[[1]]"), - listOf("[1, 2, 3]"), - listOf("[1, 2, 4]"), - listOf("[1, 2, [10,11,[12,[]]]]"), - - // S-exp - listOf("()"), - listOf("(1 2 3)"), - listOf("(1 2 4)"), - listOf("(3 2 1)"), - listOf("(1 2 (3))"), - listOf("(1 2 (3 4 5 (6)) () () (()))"), - - // bag - listOf("(bag)"), - listOf("(bag 1 2 3)", "(bag 3 2 1)"), - listOf("(bag 1 1 2)"), - listOf("(bag 1 2 2)"), - listOf("(bag 1 2 (3))"), - listOf("(bag 1 2 (3 4 5 (6)) () () (()))"), - listOf("(bag 1 2 (3 4 5 (6)) () () ((null) missing::null))"), - - // struct - listOf("{}"), - listOf("{foo: 1.0}", "{foo: 1.00}"), - listOf("{foo: 1, bar: {}}"), - listOf("{foo: 1, bar: 2}", "{bar: 2, foo: 1}"), - listOf("{foo: [1,2, (a {bar: baz})], bar: {}}") + // unknown + listOf("missing::null"), + listOf("null"), + + // boolean + listOf("true"), + listOf("false"), + + // int + listOf("1"), + listOf("2"), + + // float + listOf("nan"), + listOf("-inf", "-1e1000", "-1.000000000000e1000"), + listOf("1e0", "1e00"), + listOf("-5e-1", "-0.5e0"), + listOf("+inf", "1e1000", "1.000000000000e1000"), + + // decimal + listOf("1.0", "1.00"), + listOf("-0.0", "-0.0000000000", "0d10000"), + listOf("-1d1000", "-1.000000000000d1000"), + + // timestamp + listOf("2019T"), + listOf( + "2017T", + "2017-01T", + "2017-01-01T", + "2017-01-01T00:00-00:00", + "2017-01-01T01:00+01:00" + ), + + // symbol + listOf("aSymbol"), + + // string + listOf("\"a string\""), + + // clob + listOf("{{ \"This is a CLOB of text.\" }}"), + + // blob + listOf("{{ dHdvIHBhZGRpbmcgY2hhcmFjdGVycw== }}"), + + // list + listOf("[]"), + listOf("[[]]"), + listOf("[[1]]"), + listOf("[1, 2, 3]"), + listOf("[1, 2, 4]"), + listOf("[1, 2, [10,11,[12,[]]]]"), + + // S-exp + listOf("()"), + listOf("(1 2 3)"), + listOf("(1 2 4)"), + listOf("(3 2 1)"), + listOf("(1 2 (3))"), + listOf("(1 2 (3 4 5 (6)) () () (()))"), + + // bag + listOf("(bag)"), + listOf("(bag 1 2 3)", "(bag 3 2 1)"), + listOf("(bag 1 1 2)"), + listOf("(bag 1 2 2)"), + listOf("(bag 1 2 (3))"), + listOf("(bag 1 2 (3 4 5 (6)) () () (()))"), + listOf("(bag 1 2 (3 4 5 (6)) () () ((null) missing::null))"), + + // struct + listOf("{}"), + listOf("{foo: 1.0}", "{foo: 1.00}"), + listOf("{foo: 1, bar: {}}"), + listOf("{foo: 1, bar: 2}", "{bar: 2, foo: 1}"), + listOf("{foo: [1,2, (a {bar: baz})], bar: {}}") ) @JvmStatic @@ -126,24 +126,24 @@ internal class DefaultPtsEqualityTest { // combine each element from a equivalent group with the elements from the other groups @JvmStatic fun nonEquivalentValuesTestCases(): Stream> = - values.foldIndexed(mutableListOf>()) { index, pairs, list -> - val nonEquivalent = values.toMutableList().apply { removeAt(index) }.flatten() + values.foldIndexed(mutableListOf>()) { index, pairs, list -> + val nonEquivalent = values.toMutableList().apply { removeAt(index) }.flatten() - list.crossProduct(nonEquivalent).mapTo(pairs) { arrayOf(it.first, it.second) } + list.crossProduct(nonEquivalent).mapTo(pairs) { arrayOf(it.first, it.second) } - pairs - }.stream() + pairs + }.stream() } } // all combinations of size 2 including each element with 
itself private fun List.combinations2(): List> = - when (this.size) { - 0 -> throw IllegalArgumentException() - 1 -> listOf(arrayOf(this[0], this[0])) - else -> this.foldIndexed(mutableListOf()) { index, pairs, el -> - this.subList(index, this.size).forEach { subEl -> pairs.add(arrayOf(el, subEl)) } + when (this.size) { + 0 -> throw IllegalArgumentException() + 1 -> listOf(arrayOf(this[0], this[0])) + else -> this.foldIndexed(mutableListOf()) { index, pairs, el -> + this.subList(index, this.size).forEach { subEl -> pairs.add(arrayOf(el, subEl)) } - pairs - } + pairs } + } diff --git a/testscript/test/org/partiql/testscript/parser/BaseParseTests.kt b/testscript/test/org/partiql/testscript/parser/BaseParseTests.kt index 642f5f605a..fd76802cd5 100644 --- a/testscript/test/org/partiql/testscript/parser/BaseParseTests.kt +++ b/testscript/test/org/partiql/testscript/parser/BaseParseTests.kt @@ -20,12 +20,16 @@ abstract class BaseParseTests { assertEquals(expectedErrorMessage, exception.message) } - protected fun singleModulesList(vararg node: AstNode) = listOf(ModuleNode(node.asList(), - ScriptLocation("$inputBasePath/input[0].sqlts", 0))) + protected fun singleModulesList(vararg node: AstNode) = listOf( + ModuleNode( + node.asList(), + ScriptLocation("$inputBasePath/input[0].sqlts", 0) + ) + ) protected fun assertParse(vararg ionDocuments: String, expected: List) { val inputs = createInput(*ionDocuments) assertEquals(expected, parser.parse(inputs)) } -} \ No newline at end of file +} diff --git a/testscript/test/org/partiql/testscript/parser/ParserForTests.kt b/testscript/test/org/partiql/testscript/parser/ParserForTests.kt index 8fef94e3a0..bb35e8cd41 100644 --- a/testscript/test/org/partiql/testscript/parser/ParserForTests.kt +++ b/testscript/test/org/partiql/testscript/parser/ParserForTests.kt @@ -8,7 +8,8 @@ import org.partiql.testscript.parser.ast.TestNode class ParserForTests : BaseParseTests() { @Test - fun forWithSingleTestAndVariable() = assertParse(""" + fun forWithSingleTestAndVariable() = assertParse( + """ |for::{ | template: [ | test::{ @@ -22,15 +23,21 @@ class ParserForTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |}""".trimMargin(), - expected = singleModulesList(TestNode(id = "testTemplate\$\${value:1,expected:(success 2)}", - description = null, - statement = "1 + 1", - environment = null, - expected = BaseParseTests.ion.singleValue("(success 2)") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 11)))) + expected = singleModulesList( + TestNode( + id = "testTemplate\$\${value:1,expected:(success 2)}", + description = null, + statement = "1 + 1", + environment = null, + expected = BaseParseTests.ion.singleValue("(success 2)") as IonSexp, + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 11) + ) + ) + ) @Test - fun forWithMultipleTestsAndMultipleVariables() = assertParse(""" + fun forWithMultipleTestsAndMultipleVariables() = assertParse( + """ |for::{ | template: [ | @@ -56,45 +63,57 @@ class ParserForTests : BaseParseTests() { | { description: "description 2", value: 2, table: [2], result: 2, environment: {foo: "2"}, expected: (success 20) } | ] |}""".trimMargin(), - expected = singleModulesList(TestNode(id = "testTemplate1\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:\"1\"},expected:(success 10)}", - description = "test: description 1", - statement = "1 + 1", - environment = ion.singleValue("{myTable: [1]}") as IonStruct, - expected = ion.singleValue("(success 1)") as 
IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 22)), + expected = singleModulesList( + TestNode( + id = "testTemplate1\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:\"1\"},expected:(success 10)}", + description = "test: description 1", + statement = "1 + 1", + environment = ion.singleValue("{myTable: [1]}") as IonStruct, + expected = ion.singleValue("(success 1)") as IonSexp, + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 22) + ), - TestNode(id = "testTemplate1\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:\"2\"},expected:(success 20)}", - description = "test: description 2", - statement = "1 + 2", - environment = ion.singleValue("{myTable: [2]}") as IonStruct, - expected = ion.singleValue("(success 2)") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 23)), + TestNode( + id = "testTemplate1\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:\"2\"},expected:(success 20)}", + description = "test: description 2", + statement = "1 + 2", + environment = ion.singleValue("{myTable: [2]}") as IonStruct, + expected = ion.singleValue("(success 2)") as IonSexp, + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 23) + ), - TestNode(id = "testTemplate2\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:\"1\"},expected:(success 10)}", - description = "description 1", - statement = "1", - environment = ion.singleValue("{foo: \"1\"}") as IonStruct, - expected = ion.singleValue("(success 10)") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 22)), + TestNode( + id = "testTemplate2\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:\"1\"},expected:(success 10)}", + description = "description 1", + statement = "1", + environment = ion.singleValue("{foo: \"1\"}") as IonStruct, + expected = ion.singleValue("(success 10)") as IonSexp, + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 22) + ), - TestNode(id = "testTemplate2\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:\"2\"},expected:(success 20)}", - description = "description 2", - statement = "2", - environment = ion.singleValue("{foo: \"2\"}") as IonStruct, - expected = ion.singleValue("(success 20)") as IonSexp, - scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 23)))) + TestNode( + id = "testTemplate2\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:\"2\"},expected:(success 20)}", + description = "description 2", + statement = "2", + environment = ion.singleValue("{foo: \"2\"}") as IonStruct, + expected = ion.singleValue("(success 20)") as IonSexp, + scriptLocation = ScriptLocation("$inputBasePath/input[0].sqlts", 23) + ) + ) + ) @Test fun forWrongType() = assertParseError( - input = """ for::"should be a struct" """, - expectedErrorMessage = """ + input = """ for::"should be a struct" """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for for. 
Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forWrongTemplateType() = assertParseError( - input = """ + input = """ |for::{ | template: "should be a list", | @@ -103,14 +122,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:2 - Wrong type for for.template. Expected [LIST], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forEmptyTemplate() = assertParseError( - input = """ + input = """ |for::{ | template: [], | @@ -118,14 +138,15 @@ class ParserForTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Field must have at least one element: for.template - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateWrongType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::"should be a struct" @@ -136,14 +157,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Wrong type for for.template[0]. Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateWrongIdType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -158,14 +180,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:4 - Wrong type for for.template[0].id. Expected [SYMBOL], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateMissingId() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -179,14 +202,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Missing required field: for.template[0].id - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateWrongDescriptionType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -202,14 +226,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:5 - Invalid template value for field: for.template[0].description. Must start with '${'$'}' when it's a SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateWrongStatementType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -224,14 +249,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:5 - Invalid template value for field: for.template[0].statement. 
Must start with '${'$'}' when it's a SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateMissingStatement() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -245,14 +271,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Missing required field: for.template[0].statement - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateWrongEnvironmentType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -267,14 +294,15 @@ class ParserForTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |}""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - Wrong type for for.template[0].environment. Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateWrongExpectedType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -288,14 +316,15 @@ class ParserForTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |}""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:7 - Invalid template value for field: for.template[0].expected. Must start with '${'$'}' when it's a SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test fun forTestTemplateMissingExpected() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -309,28 +338,30 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Missing required field: for.template[0].expected - """.trimMargin()) + """.trimMargin() + ) @Test fun forMissingTemplate() = assertParseError( - input = """ + input = """ |for::{ | variable_sets: [ | { value: 1, expected: (success 2) } | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: for.template - """.trimMargin()) + """.trimMargin() + ) @Test fun forWrongVariableSetType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -343,14 +374,15 @@ class ParserForTests : BaseParseTests() { | variable_sets: "should be a list" |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:10 - Wrong type for for.variable_sets. 
Expected [LIST], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forMissingVariableSet() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -361,14 +393,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: for.variable_sets - """.trimMargin()) + """.trimMargin() + ) @Test fun forWrongVariableSetElementType() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -383,14 +416,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:11 - Wrong type for variable_sets[0]. Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forEmptyVariableSet() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -403,14 +437,15 @@ class ParserForTests : BaseParseTests() { | variable_sets: [] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Field must have at least one element: for.variable_sets - """.trimMargin()) + """.trimMargin() + ) @Test fun forUnknownField() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -427,14 +462,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:10 - Unexpected field: for.shouldNotBeHere - """.trimMargin()) + """.trimMargin() + ) @Test fun forUnknownVariable() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -449,14 +485,15 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:11 - Missing template variable: unknown - """.trimMargin()) + """.trimMargin() + ) @Test fun forInvalidExpectedVariable() = assertParseError( - input = """ + input = """ |for::{ | template: [ | test::{ @@ -471,8 +508,9 @@ class ParserForTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:11 - for.template.expected success must have two elements, e.g. (success (bag {a: 1})) - """.trimMargin()) -} \ No newline at end of file + """.trimMargin() + ) +} diff --git a/testscript/test/org/partiql/testscript/parser/ParserMacroTests.kt b/testscript/test/org/partiql/testscript/parser/ParserMacroTests.kt index 4f9a8d7f42..9da0988bc4 100644 --- a/testscript/test/org/partiql/testscript/parser/ParserMacroTests.kt +++ b/testscript/test/org/partiql/testscript/parser/ParserMacroTests.kt @@ -11,9 +11,10 @@ class ParserMacroTests : BaseParseTests() { * We use `#` instead of `$` in test fixtures because escaping `$` in a kotlin * multiline string is messy, e.g. 
`"""${"$"}"""` results in `"$"` */ - + @Test - fun forWithSingleTestAndVariable() = assertParse(""" + fun forWithSingleTestAndVariable() = assertParse( + """ |for::{ | template: [ | test::{ @@ -27,17 +28,24 @@ class ParserMacroTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |}""".trimMargin(), - expected = singleModulesList(TestNode(id = "testTemplate\$\${value:1,expected:(success 2)}", - description = null, - statement = "1 + 1", - environment = null, - expected = BaseParseTests.ion.singleValue("(success 2)") as IonSexp, - scriptLocation = ScriptLocation( - "$inputBasePath/input[0].sqlts", - 11)))) + expected = singleModulesList( + TestNode( + id = "testTemplate\$\${value:1,expected:(success 2)}", + description = null, + statement = "1 + 1", + environment = null, + expected = BaseParseTests.ion.singleValue("(success 2)") as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 11 + ) + ) + ) + ) @Test - fun forWithMultipleTestsAndMultipleVariables() = assertParse(""" + fun forWithMultipleTestsAndMultipleVariables() = assertParse( + """ |for::{ | template: [ | @@ -63,61 +71,87 @@ class ParserMacroTests : BaseParseTests() { | { description: "description 2", value: 2, table: [2], result: 2, environment: {foo: 2}, expected: (success 20) } | ] |}""".trimMargin(), - expected = singleModulesList(TestNode(id = "testTemplate1\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:1},expected:(success 10)}", - description = "test: description 1", - statement = "1 + 1", - environment = BaseParseTests.ion.singleValue( - "{myTable: [1]}") as IonStruct, - expected = BaseParseTests.ion.singleValue( - "(success 1)") as IonSexp, - scriptLocation = ScriptLocation( - "$inputBasePath/input[0].sqlts", - 22)), + expected = singleModulesList( + TestNode( + id = "testTemplate1\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:1},expected:(success 10)}", + description = "test: description 1", + statement = "1 + 1", + environment = BaseParseTests.ion.singleValue( + "{myTable: [1]}" + ) as IonStruct, + expected = BaseParseTests.ion.singleValue( + "(success 1)" + ) as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 22 + ) + ), - TestNode(id = "testTemplate1\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:2},expected:(success 20)}", - description = "test: description 2", - statement = "1 + 2", - environment = BaseParseTests.ion.singleValue( - "{myTable: [2]}") as IonStruct, - expected = BaseParseTests.ion.singleValue( - "(success 2)") as IonSexp, - scriptLocation = ScriptLocation( - "$inputBasePath/input[0].sqlts", - 23)), + TestNode( + id = "testTemplate1\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:2},expected:(success 20)}", + description = "test: description 2", + statement = "1 + 2", + environment = BaseParseTests.ion.singleValue( + "{myTable: [2]}" + ) as IonStruct, + expected = BaseParseTests.ion.singleValue( + "(success 2)" + ) as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 23 + ) + ), - TestNode(id = "testTemplate2\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:1},expected:(success 10)}", - description = "description 1", - statement = "1", - environment = ion.singleValue( - "{foo: 1}") as IonStruct, - expected = ion.singleValue( - "(success 10)") as IonSexp, - scriptLocation = ScriptLocation( - "$inputBasePath/input[0].sqlts", - 22)), + TestNode( + 
id = "testTemplate2\$\${description:\"description 1\",value:1,table:[1],result:1,environment:{foo:1},expected:(success 10)}", + description = "description 1", + statement = "1", + environment = ion.singleValue( + "{foo: 1}" + ) as IonStruct, + expected = ion.singleValue( + "(success 10)" + ) as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 22 + ) + ), - TestNode(id = "testTemplate2\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:2},expected:(success 20)}", - description = "description 2", - statement = "2", - environment = BaseParseTests.ion.singleValue( - "{foo: 2}") as IonStruct, - expected = BaseParseTests.ion.singleValue( - "(success 20)") as IonSexp, - scriptLocation = ScriptLocation( - "$inputBasePath/input[0].sqlts", - 23)))) + TestNode( + id = "testTemplate2\$\${description:\"description 2\",value:2,table:[2],result:2,environment:{foo:2},expected:(success 20)}", + description = "description 2", + statement = "2", + environment = BaseParseTests.ion.singleValue( + "{foo: 2}" + ) as IonStruct, + expected = BaseParseTests.ion.singleValue( + "(success 20)" + ) as IonSexp, + scriptLocation = ScriptLocation( + "$inputBasePath/input[0].sqlts", + 23 + ) + ) + ) + ) @Test fun forWrongType() = - assertParseError(input = """ for::"should be a struct" """, - expectedErrorMessage = """ + assertParseError( + input = """ for::"should be a struct" """, + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Wrong type for for. Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test fun forWrongTemplateType() = - assertParseError(input = """ + assertParseError( + input = """ |for::{ | template: "should be a list", | @@ -126,13 +160,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:2 - Wrong type for for.template. Expected [LIST], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun forEmptyTemplate() = assertParseError(input = """ + fun forEmptyTemplate() = assertParseError( + input = """ |for::{ | template: [], | @@ -140,13 +176,15 @@ class ParserMacroTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |} """.trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Field must have at least one element: for.template - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateWrongType() = assertParseError(input = """ + fun forTestTemplateWrongType() = assertParseError( + input = """ |for::{ | template: [ | test::"should be a struct" @@ -157,13 +195,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Wrong type for for.template[0]. 
Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateWrongIdType() = assertParseError(input = """ + fun forTestTemplateWrongIdType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -178,13 +218,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:4 - Wrong type for for.template[0].id. Expected [SYMBOL], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateMissingId() = assertParseError(input = """ + fun forTestTemplateMissingId() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -198,13 +240,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Missing required field: for.template[0].id - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateWrongDescriptionType() = assertParseError(input = """ + fun forTestTemplateWrongDescriptionType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -220,13 +264,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:5 - Invalid template value for field: for.template[0].description. Must start with '${'$'}' when it's a SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateWrongStatementType() = assertParseError(input = """ + fun forTestTemplateWrongStatementType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -241,13 +287,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:5 - Invalid template value for field: for.template[0].statement. Must start with '${'$'}' when it's a SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateMissingStatement() = assertParseError(input = """ + fun forTestTemplateMissingStatement() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -261,13 +309,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Missing required field: for.template[0].statement - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateWrongEnvironmentType() = assertParseError(input = """ + fun forTestTemplateWrongEnvironmentType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -282,13 +332,15 @@ class ParserMacroTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |}""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:6 - Wrong type for for.template[0].environment. 
Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateWrongExpectedType() = assertParseError(input = """ + fun forTestTemplateWrongExpectedType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -302,13 +354,15 @@ class ParserMacroTests : BaseParseTests() { | { value: 1, expected: (success 2) } | ] |}""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:7 - Invalid template value for field: for.template[0].expected. Must start with '${'$'}' when it's a SYMBOL - """.trimMargin()) + """.trimMargin() + ) @Test - fun forTestTemplateMissingExpected() = assertParseError(input = """ + fun forTestTemplateMissingExpected() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -322,26 +376,30 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:3 - Missing required field: for.template[0].expected - """.trimMargin()) + """.trimMargin() + ) @Test - fun forMissingTemplate() = assertParseError(input = """ + fun forMissingTemplate() = assertParseError( + input = """ |for::{ | variable_sets: [ | { value: 1, expected: (success 2) } | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: for.template - """.trimMargin()) + """.trimMargin() + ) @Test - fun forWrongVariableSetType() = assertParseError(input = """ + fun forWrongVariableSetType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -354,13 +412,15 @@ class ParserMacroTests : BaseParseTests() { | variable_sets: "should be a list" |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:10 - Wrong type for for.variable_sets. Expected [LIST], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun forMissingVariableSet() = assertParseError(input = """ + fun forMissingVariableSet() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -371,13 +431,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Missing required field: for.variable_sets - """.trimMargin()) + """.trimMargin() + ) @Test - fun forWrongVariableSetElementType() = assertParseError(input = """ + fun forWrongVariableSetElementType() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -392,13 +454,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:11 - Wrong type for variable_sets[0]. 
Expected [STRUCT], got STRING - """.trimMargin()) + """.trimMargin() + ) @Test - fun forEmptyVariableSet() = assertParseError(input = """ + fun forEmptyVariableSet() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -411,13 +475,15 @@ class ParserMacroTests : BaseParseTests() { | variable_sets: [] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:1 - Field must have at least one element: for.variable_sets - """.trimMargin()) + """.trimMargin() + ) @Test - fun forUnknownField() = assertParseError(input = """ + fun forUnknownField() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -434,13 +500,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:10 - Unexpected field: for.shouldNotBeHere - """.trimMargin()) + """.trimMargin() + ) @Test - fun forUnknownVariable() = assertParseError(input = """ + fun forUnknownVariable() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -455,13 +523,15 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:11 - Missing template variable: unknown - """.trimMargin()) + """.trimMargin() + ) @Test - fun forInvalidExpectedVariable() = assertParseError(input = """ + fun forInvalidExpectedVariable() = assertParseError( + input = """ |for::{ | template: [ | test::{ @@ -476,10 +546,9 @@ class ParserMacroTests : BaseParseTests() { | ] |} |""".trimMargin(), - expectedErrorMessage = """ + expectedErrorMessage = """ |Errors found when parsing test scripts: | $inputBasePath/input[0].sqlts:11 - for.template.expected success must have two elements, e.g. (success (bag {a: 1})) - """.trimMargin()) - - -} \ No newline at end of file + """.trimMargin() + ) +} diff --git a/testscript/test/org/partiql/testscript/parser/util.kt b/testscript/test/org/partiql/testscript/parser/util.kt index 2821ff75b0..8d504a8e94 100644 --- a/testscript/test/org/partiql/testscript/parser/util.kt +++ b/testscript/test/org/partiql/testscript/parser/util.kt @@ -7,11 +7,12 @@ val inputBasePath = "${File(".").absolutePath.removeSuffix("/.")}/test/resources /** * Creates the input and replaces `#` by `$`. We use `#` in the test fixtures because escaping `$` in a kotlin - * multiline string is messy, e.g. `"""${"$"}"""` results in `"$"` + * multiline string is messy, e.g. `"""${"$"}"""` results in `"$"` */ fun createInput(vararg ionDocuments: String): List = - ionDocuments.mapIndexed { index, doc -> - NamedInputStream( - "$inputBasePath/input[$index].sqlts", - doc.replace("#", "$").byteInputStream(Charset.forName("UTF-8"))) - } \ No newline at end of file + ionDocuments.mapIndexed { index, doc -> + NamedInputStream( + "$inputBasePath/input[$index].sqlts", + doc.replace("#", "$").byteInputStream(Charset.forName("UTF-8")) + ) + }
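
Note (illustrative aside, not part of the patch): several of the reformatted test files above (CompilerTest.kt, ParserMacroTests.kt, util.kt) carry the comment explaining that fixtures write `#` in place of `$`, because a literal `$` inside a Kotlin raw string needs the awkward `${'$'}` template. A minimal standalone sketch of the two approaches, using only the standard library; the fixture text below is made up for demonstration:

    fun main() {
        // Spelling the dollar sign directly in a raw string requires wrapping
        // the char literal '$' in a string template:
        val escaped = """testTemplate${'$'}${'$'}{value:1}""" // -> testTemplate$${value:1}

        // The fixtures instead write '#' and substitute it before parsing, which is
        // the trick createInput in util.kt applies via doc.replace("#", "$"):
        val viaHash = "testTemplate##{value:1}".replace("#", "$")

        check(escaped == viaHash) // both spellings yield the same text
        println(viaHash)
    }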
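
Note (illustrative aside, not part of the patch): the CompilerTest cases above exercise skip_list and append_test patterns such as "test1" and ".*". A hedged sketch of the matching those fixtures suggest, assuming each pattern is treated as a regular expression matched against the whole test id; matchesPattern and the sample ids are hypothetical names for demonstration only:

    // Assumed behaviour, inferred from the fixtures: a test is skipped (or appended to)
    // when any listed pattern fully matches its id; skipListSameTestMultipleTimes above
    // shows that overlapping patterns still produce a single SkippedTestExpression.
    fun matchesPattern(testId: String, pattern: String): Boolean =
        Regex(pattern).matches(testId)

    fun main() {
        val skipList = listOf(".*", "test1", "test.*")
        println(skipList.any { matchesPattern("test1", it) }) // true: at least one pattern matches test1
        println(skipList.any { matchesPattern("other", it) }) // true as well: ".*" matches every id
    }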