From 761a90e8bc94585e174839c717ddf99555c1f6e9 Mon Sep 17 00:00:00 2001
From: Arash M <27716912+am357@users.noreply.github.com>
Date: Tue, 21 Dec 2021 11:01:12 -0800
Subject: [PATCH 1/6] [lang] Remove wildcard imports

- Replaces the wildcard imports with explicit ones to make the code easier to read.
- Also fixes some redundant import namespacing that was needed to work around name conflicts caused by the wildcard imports.
- Removes unused imports.
---
lang/src/org/partiql/lang/CompilerPipeline.kt | 75 +- lang/src/org/partiql/lang/Exceptions.kt | 11 +- .../lang/ast/AggregateCallSiteListMeta.kt | 2 +- .../partiql/lang/ast/AstDeserialization.kt | 19 +- .../src/org/partiql/lang/ast/InternalMetas.kt | 2 +- .../org/partiql/lang/ast/IsImplictJoinMeta.kt | 3 - .../org/partiql/lang/ast/IsIonLiteralMeta.kt | 3 - .../lang/ast/MemoizedMetaDeserializer.kt | 2 +- .../partiql/lang/ast/SourceLocationMeta.kt | 8 +- .../partiql/lang/ast/StatementToExprNode.kt | 402 ++++---- .../org/partiql/lang/ast/StaticTypeMeta.kt | 12 +- lang/src/org/partiql/lang/ast/Util.kt | 8 +- lang/src/org/partiql/lang/ast/ast.kt | 12 +- lang/src/org/partiql/lang/ast/meta.kt | 7 +- .../lang/ast/passes/AstRewriterBase.kt | 122 ++- .../org/partiql/lang/ast/passes/AstVisitor.kt | 9 +- .../org/partiql/lang/ast/passes/AstWalker.kt | 83 +- .../lang/ast/passes/SemanticException.kt | 13 +- .../lang/errors/ErrorAndErrorContexts.kt | 97 +- lang/src/org/partiql/lang/eval/Bindings.kt | 8 +- lang/src/org/partiql/lang/eval/Environment.kt | 2 +- .../partiql/lang/eval/EvaluatingCompiler.kt | 167 ++- .../partiql/lang/eval/EvaluationSession.kt | 2 +- lang/src/org/partiql/lang/eval/Exceptions.kt | 10 +- .../src/org/partiql/lang/eval/ExprFunction.kt | 7 +- .../partiql/lang/eval/ExprNodeExtensions.kt | 13 +- .../partiql/lang/eval/ExprValueExtensions.kt | 63 +- .../org/partiql/lang/eval/ExprValueFactory.kt | 23 +- .../org/partiql/lang/eval/ExprValueType.kt | 2 +- .../partiql/lang/eval/GroupKeyExprValue.kt | 2 +- .../partiql/lang/eval/IonStructBindings.kt | 5 +- .../lang/eval/NaturalExprValueComparators.kt | 23 +- .../src/org/partiql/lang/eval/NodeMetadata.kt | 7 +- .../org/partiql/lang/eval/OrdinalBindings.kt | 2 +- .../org/partiql/lang/eval/StructExprValue.kt | 13 +- lang/src/org/partiql/lang/eval/Thunk.kt | 5 +- .../partiql/lang/eval/binding/LocalsBinder.kt | 9 +- .../lang/eval/builtins/BuiltinFunctions.kt | 8 +- .../eval/builtins/CoalesceExprFunction.kt | 12 +- .../lang/eval/builtins/DateAddExprFunction.kt | 50 +- .../eval/builtins/DateDiffExprFunction.kt | 17 +- .../lang/eval/builtins/ExtractExprFunction.kt | 16 +- .../eval/builtins/MakeDateExprFunction.kt | 10 +- .../eval/builtins/MakeTimeExprFunction.kt | 17 +- .../lang/eval/builtins/NullIfExprFunction.kt | 8 +- .../lang/eval/builtins/SizeExprFunction.kt | 15 +- .../eval/builtins/SubstringExprFunction.kt | 16 +- .../lang/eval/builtins/TimestampParser.kt | 22 +- .../builtins/TimestampTemporalAccessor.kt | 10 +- .../eval/builtins/ToStringExprFunction.kt | 25 +- .../eval/builtins/ToTimestampExprFunction.kt | 16 +- .../lang/eval/builtins/TrimExprFunction.kt | 22 +- .../eval/builtins/timestamp/FormatItem.kt | 2 - .../eval/builtins/timestamp/FormatPattern.kt | 36 +- .../timestamp/TimestampFormatPatternLexer.kt | 8 +- .../timestamp/TimestampFormatPatternParser.kt | 7 +- .../partiql/lang/eval/io/DelimitedValues.kt | 20 +- lang/src/org/partiql/lang/syntax/Parser.kt | 4 +- lang/src/org/partiql/lang/syntax/SqlLexer.kt | 282 +++--- lang/src/org/partiql/lang/syntax/SqlParser.kt | 949 ++++++++++--------
lang/src/org/partiql/lang/syntax/Token.kt | 17 +- .../org/partiql/lang/util/BindingHelpers.kt | 7 +- .../partiql/lang/util/CollectionExtensions.kt | 7 +- .../partiql/lang/util/ExprValueFormatter.kt | 33 +- .../partiql/lang/util/IonValueExtensions.kt | 22 +- .../org/partiql/lang/util/IonWriterContext.kt | 4 +- .../org/partiql/lang/util/NumberExtensions.kt | 13 +- .../partiql/lang/util/PropertyMapHelpers.kt | 9 +- .../partiql/lang/util/TokenListExtensions.kt | 6 +- lang/test/org/partiql/lang/TestBase.kt | 30 +- lang/test/org/partiql/lang/ast/AstNodeTest.kt | 16 +- .../org/partiql/lang/ast/AstSerDeTests.kt | 6 +- .../partiql/lang/ast/IsIonLiteralMetaTest.kt | 5 +- .../partiql/lang/ast/PathComponentExprTest.kt | 14 +- .../lang/ast/SourceLocationMetaTest.kt | 5 +- .../partiql/lang/ast/VariableReferenceTest.kt | 4 +- .../partiql/lang/ast/passes/AstWalkerTests.kt | 21 +- .../lang/ast/passes/RewriterTestBase.kt | 3 +- .../partiql/lang/errors/LexerErrorsTest.kt | 7 +- .../partiql/lang/errors/ParserErrorsTest.kt | 4 +- .../lang/errors/PropertyValueMapTest.kt | 29 +- .../partiql/lang/errors/SqlExceptionTest.kt | 23 +- .../org/partiql/lang/eval/BindingsTest.kt | 8 +- .../partiql/lang/eval/CompileOptionsTest.kt | 4 +- .../lang/eval/EvaluatingCompilerCastTest.kt | 798 +++++++-------- .../eval/EvaluatingCompilerDateTimeTests.kt | 6 +- .../eval/EvaluatingCompilerExceptionsTest.kt | 6 +- .../eval/EvaluatingCompilerGroupByTest.kt | 8 +- .../lang/eval/EvaluatingCompilerHavingTest.kt | 6 +- .../lang/eval/EvaluatingCompilerNAryTests.kt | 17 +- .../lang/eval/EvaluationSessionTest.kt | 7 +- .../partiql/lang/eval/EvaluatorTestBase.kt | 25 +- .../lang/eval/ExprValueAdaptersTest.kt | 4 +- .../partiql/lang/eval/ExprValueFactoryTest.kt | 21 +- .../partiql/lang/eval/FormalParameterTest.kt | 2 +- lang/test/org/partiql/lang/eval/IntTest.kt | 9 +- .../partiql/lang/eval/LikePredicateTest.kt | 11 +- .../eval/NaturalExprValueComparatorsTest.kt | 8 +- .../org/partiql/lang/eval/NodeMetadataTest.kt | 17 +- .../lang/eval/QuotedIdentifierTests.kt | 6 +- .../eval/SimpleEvaluatingCompilerTests.kt | 6 +- .../org/partiql/lang/eval/UnknownValTest.kt | 2 +- .../eval/builtins/BuiltinFunctionsTest.kt | 18 +- .../eval/builtins/CoalesceEvaluationTest.kt | 10 +- .../eval/builtins/ConcatEvaluationTest.kt | 35 +- .../eval/builtins/DateAddEvaluationTest.kt | 5 +- .../eval/builtins/DateAddExprFunctionTest.kt | 17 +- .../eval/builtins/DateDiffEvaluationTest.kt | 5 +- .../eval/builtins/DateDiffExprFunctionTest.kt | 15 +- .../eval/builtins/ExtractEvaluationTest.kt | 10 +- .../eval/builtins/MakeDateEvaluationTest.kt | 6 +- .../eval/builtins/MakeTimeEvaluationTest.kt | 6 +- .../eval/builtins/NullIfEvaluationTest.kt | 7 +- .../lang/eval/builtins/SizeEvaluationTest.kt | 7 +- .../eval/builtins/SizeExprFunctionTest.kt | 15 +- .../builtins/SubstringExprFunctionTest.kt | 14 +- .../lang/eval/builtins/TimestampParserTest.kt | 24 +- .../TimestampTemporalAccessorTests.kt | 24 +- .../eval/builtins/ToStringExprFunctionTest.kt | 7 +- .../builtins/ToTimestampExprFunctionTest.kt | 7 +- .../lang/eval/builtins/TrimEvaluationTest.kt | 6 +- .../eval/builtins/TrimExprFunctionTest.kt | 13 +- .../TimestampFormatPatternLexerTest.kt | 8 +- .../TimestampFormatPatternParserTest.kt | 14 +- .../ToTimestampFormatPatternValidationTest.kt | 14 +- .../eval/io/CustomExceptionHandlerTest.kt | 11 +- .../lang/eval/io/DelimitedValuesTest.kt | 14 +- .../org/partiql/lang/eval/time/TimeTest.kt | 2 +- .../org/partiql/lang/syntax/SqlLexerTest.kt | 242 +++-- 
.../lang/syntax/SqlParserDateTimeTests.kt | 2 +- .../lang/syntax/SqlParserPrecedenceTest.kt | 7 +- .../partiql/lang/util/AssertJExtensions.kt | 2 +- .../org/partiql/lang/util/AstExtensions.kt | 8 +- .../util/CollectionsFoldLeftProductTest.kt | 2 - .../partiql/lang/util/CollectionsListTests.kt | 2 +- .../lang/util/CollectionsProductTest.kt | 2 +- .../ConfigurableExprValueFormatterTest.kt | 19 +- .../lang/util/ExprValueFactoryExtensions.kt | 3 +- .../test/org/partiql/lang/util/NumbersTest.kt | 2 +- .../partiql/lang/util/SexpAstPrettyPrinter.kt | 4 +- 140 files changed, 2623 insertions(+), 2067 deletions(-) diff --git a/lang/src/org/partiql/lang/CompilerPipeline.kt b/lang/src/org/partiql/lang/CompilerPipeline.kt index 59d7c457da..7ab0c3430b 100644 --- a/lang/src/org/partiql/lang/CompilerPipeline.kt +++ b/lang/src/org/partiql/lang/CompilerPipeline.kt @@ -14,36 +14,41 @@ package org.partiql.lang -import com.amazon.ion.* -import org.partiql.lang.ast.* -import org.partiql.lang.eval.* -import org.partiql.lang.eval.builtins.* +import com.amazon.ion.IonSystem +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.eval.CompileOptions +import org.partiql.lang.eval.EvaluatingCompiler +import org.partiql.lang.eval.ExprFunction +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.Expression +import org.partiql.lang.eval.builtins.createBuiltinFunctions import org.partiql.lang.eval.builtins.storedprocedure.StoredProcedure -import org.partiql.lang.syntax.* +import org.partiql.lang.syntax.Parser +import org.partiql.lang.syntax.SqlParser import org.partiql.lang.util.interruptibleFold /** * Contains all of the information needed for processing steps. */ data class StepContext( - /** The instance of [ExprValueFactory] that is used by the pipeline. */ - val valueFactory: ExprValueFactory, + /** The instance of [ExprValueFactory] that is used by the pipeline. */ + val valueFactory: ExprValueFactory, - /** The compilation options. */ - val compileOptions: CompileOptions, + /** The compilation options. */ + val compileOptions: CompileOptions, - /** - * Returns a list of all functions which are available for execution. - * Includes built-in functions as well as custom functions added while the [CompilerPipeline] - * was being built. - */ - val functions: @JvmSuppressWildcards Map, + /** + * * Returns a list of all functions which are available for execution. + * Includes built-in functions as well as custom functions added while the [CompilerPipeline] + * was being built. + */ + val functions: @JvmSuppressWildcards Map, - /** - * Returns a list of all stored procedures which are available for execution. - * Only includes the custom stored procedures added while the [CompilerPipeline] was being built. - */ - val procedures: @JvmSuppressWildcards Map + /** + * Returns a list of all stored procedures which are available for execution. + * Only includes the custom stored procedures added while the [CompilerPipeline] was being built. + */ + val procedures: @JvmSuppressWildcards Map ) /** @@ -108,7 +113,7 @@ interface CompilerPipeline { /** Returns an implementation of [CompilerPipeline] with all properties set to their defaults. 
*/ @JvmStatic fun standard(valueFactory: ExprValueFactory): CompilerPipeline = - builder(valueFactory).build() + builder(valueFactory).build() } /** @@ -171,23 +176,23 @@ interface CompilerPipeline { val allFunctions = builtinFunctions + customFunctions return CompilerPipelineImpl( - valueFactory, - parser ?: SqlParser(valueFactory.ion), - compileOptions ?: CompileOptions.standard(), - allFunctions, - customProcedures, - preProcessingSteps) + valueFactory, + parser ?: SqlParser(valueFactory.ion), + compileOptions ?: CompileOptions.standard(), + allFunctions, + customProcedures, + preProcessingSteps) } } } internal class CompilerPipelineImpl( - override val valueFactory: ExprValueFactory, - private val parser: Parser, - override val compileOptions: CompileOptions, - override val functions: Map, - override val procedures: Map, - private val preProcessingSteps: List + override val valueFactory: ExprValueFactory, + private val parser: Parser, + override val compileOptions: CompileOptions, + override val functions: Map, + override val procedures: Map, + private val preProcessingSteps: List ) : CompilerPipeline { private val compiler = EvaluatingCompiler(valueFactory, functions, procedures, compileOptions) @@ -206,8 +211,8 @@ internal class CompilerPipelineImpl( } internal fun executePreProcessingSteps( - query: ExprNode, - context: StepContext + query: ExprNode, + context: StepContext ) = preProcessingSteps.interruptibleFold(query) { currentExprNode, step -> step(currentExprNode, context) } diff --git a/lang/src/org/partiql/lang/Exceptions.kt b/lang/src/org/partiql/lang/Exceptions.kt index 16ab003d7a..b2cb21a192 100644 --- a/lang/src/org/partiql/lang/Exceptions.kt +++ b/lang/src/org/partiql/lang/Exceptions.kt @@ -14,9 +14,10 @@ package org.partiql.lang -import org.partiql.lang.errors.* -import org.partiql.lang.errors.Property.* - +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.errors.UNKNOWN /** * General exception class for the interpreter. 
@@ -82,8 +83,8 @@ open class SqlException(override var message: String, errorCode?.getErrorMessage(propertyValueMap) ?: UNKNOWN private fun errorLocation(propertyValueMap: PropertyValueMap?): String { - val lineNo = propertyValueMap?.get(LINE_NUMBER)?.longValue() - val columnNo = propertyValueMap?.get(COLUMN_NUMBER)?.longValue() + val lineNo = propertyValueMap?.get(Property.LINE_NUMBER)?.longValue() + val columnNo = propertyValueMap?.get(Property.COLUMN_NUMBER)?.longValue() return "at line ${lineNo ?: UNKNOWN}, column ${columnNo ?: UNKNOWN}" } diff --git a/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt b/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt index b302897c38..11b1ed37b4 100644 --- a/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt +++ b/lang/src/org/partiql/lang/ast/AggregateCallSiteListMeta.kt @@ -14,7 +14,7 @@ package org.partiql.lang.ast -import com.amazon.ion.* +import com.amazon.ion.IonWriter import org.partiql.lang.domains.PartiqlAst /** diff --git a/lang/src/org/partiql/lang/ast/AstDeserialization.kt b/lang/src/org/partiql/lang/ast/AstDeserialization.kt index e0aee57484..2fee0f6773 100644 --- a/lang/src/org/partiql/lang/ast/AstDeserialization.kt +++ b/lang/src/org/partiql/lang/ast/AstDeserialization.kt @@ -14,8 +14,23 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.util.* +import com.amazon.ion.IonSexp +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonSystem +import com.amazon.ion.IonValue +import org.partiql.lang.util.args +import org.partiql.lang.util.arity +import org.partiql.lang.util.asIonInt +import org.partiql.lang.util.asIonSexp +import org.partiql.lang.util.asIonStruct +import org.partiql.lang.util.asIonSymbol +import org.partiql.lang.util.checkThreadInterrupted +import org.partiql.lang.util.field +import org.partiql.lang.util.longValue +import org.partiql.lang.util.singleArgWithTag +import org.partiql.lang.util.stringValue +import org.partiql.lang.util.tagText +import org.partiql.lang.util.toListOfIonSexp /** * Deserializes an s-expression based AST. diff --git a/lang/src/org/partiql/lang/ast/InternalMetas.kt b/lang/src/org/partiql/lang/ast/InternalMetas.kt index 1e8e5bd789..4e394a013b 100644 --- a/lang/src/org/partiql/lang/ast/InternalMetas.kt +++ b/lang/src/org/partiql/lang/ast/InternalMetas.kt @@ -14,7 +14,7 @@ package org.partiql.lang.ast -import com.amazon.ion.* +import com.amazon.ion.IonWriter /** * Base class for [Meta] implementations which are used internally by [org.partiql.lang.eval.EvaluatingCompiler] diff --git a/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt b/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt index 5a0cf6425f..2d6e59df39 100644 --- a/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt +++ b/lang/src/org/partiql/lang/ast/IsImplictJoinMeta.kt @@ -17,9 +17,6 @@ */ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.ast.* - /** * Meta node intended to be attached to an instance of [FromSourcedJoin] to indicate that no * join condition was specified in the original query and therefore this is an implicit join. 
diff --git a/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt b/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt index 97e81efa8a..5b91be7f31 100644 --- a/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt +++ b/lang/src/org/partiql/lang/ast/IsIonLiteralMeta.kt @@ -17,9 +17,6 @@ */ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.ast.* - /** * Meta node intended to be attached to an instance of [Literal] to indicate that it was * designated as an `ionLiteral` in the parsed statement. diff --git a/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt b/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt index 1c888fbfcd..20f11294d4 100644 --- a/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt +++ b/lang/src/org/partiql/lang/ast/MemoizedMetaDeserializer.kt @@ -14,7 +14,7 @@ package org.partiql.lang.ast -import com.amazon.ion.* +import com.amazon.ion.IonSexp /** * Provides a common way to "deserialize" a memoized meta instance. diff --git a/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt b/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt index 1d4bce7912..5645123a9c 100644 --- a/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt +++ b/lang/src/org/partiql/lang/ast/SourceLocationMeta.kt @@ -14,9 +14,13 @@ package org.partiql.lang.ast -import com.amazon.ion.* +import com.amazon.ion.IonSexp +import com.amazon.ion.IonWriter import com.amazon.ionelement.api.metaOrNull -import org.partiql.lang.util.* +import org.partiql.lang.util.IonWriterContext +import org.partiql.lang.util.asIonStruct +import org.partiql.lang.util.field +import org.partiql.lang.util.longValue /** * Represents a specific location within a source file. diff --git a/lang/src/org/partiql/lang/ast/StatementToExprNode.kt b/lang/src/org/partiql/lang/ast/StatementToExprNode.kt index 445d5a488f..274e9d9d60 100644 --- a/lang/src/org/partiql/lang/ast/StatementToExprNode.kt +++ b/lang/src/org/partiql/lang/ast/StatementToExprNode.kt @@ -5,36 +5,24 @@ package org.partiql.lang.ast import com.amazon.ion.IonSystem import com.amazon.ionelement.api.toIonValue import org.partiql.lang.domains.PartiqlAst -import org.partiql.lang.domains.PartiqlAst.* import org.partiql.lang.util.checkThreadInterrupted - import org.partiql.pig.runtime.SymbolPrimitive -import org.partiql.lang.ast.SetQuantifier as ExprNodeSetQuantifier // Conflicts with PartiqlAst.SetQuantifier -import org.partiql.lang.ast.ReturningMapping as ExprNodeReturningMapping // Conflicts with PartiqlAst.ReturningMapping - -// Note that IntelliJ believes the next 3 aliases are unused without the @file:Suppress("UnusedImport") above, -// however they are actually preventing naming collisions between their ExprNode and PartiqlAst counterparts so don't -// remove them! -import org.partiql.lang.ast.CaseSensitivity as ExprNodeCaseSensitivity // Conflicts with PartiqlAst.CaseSensitivity -import org.partiql.lang.ast.ScopeQualifier as ExprNodeScopeQualifier // Conflicts with PartiqlAst.ScopeQualifier -import org.partiql.lang.ast.GroupingStrategy as ExprNodeGroupingStrategy // Conflicts with PartiqlAst.GroupingStrategy - internal typealias PartiQlMetaContainer = org.partiql.lang.ast.MetaContainer internal typealias IonElementMetaContainer = com.amazon.ionelement.api.MetaContainer /** Converts a [partiql_ast.statement] to an [ExprNode], preserving all metas where possible. 
*/ -fun Statement.toExprNode(ion: IonSystem): ExprNode = +fun PartiqlAst.Statement.toExprNode(ion: IonSystem): ExprNode = StatementTransformer(ion).transform(this) -internal fun Expr.toExprNode(ion: IonSystem): ExprNode { +internal fun PartiqlAst.Expr.toExprNode(ion: IonSystem): ExprNode { return StatementTransformer(ion).transform(this) } -internal fun SetQuantifier.toExprNodeSetQuantifier(): ExprNodeSetQuantifier = +internal fun PartiqlAst.SetQuantifier.toExprNodeSetQuantifier(): SetQuantifier = when (this) { - is SetQuantifier.All -> ExprNodeSetQuantifier.ALL - is SetQuantifier.Distinct -> ExprNodeSetQuantifier.DISTINCT + is PartiqlAst.SetQuantifier.All -> SetQuantifier.ALL + is PartiqlAst.SetQuantifier.Distinct -> SetQuantifier.DISTINCT } internal fun com.amazon.ionelement.api.MetaContainer.toPartiQlMetaContainer(): PartiQlMetaContainer { @@ -46,181 +34,181 @@ internal fun com.amazon.ionelement.api.MetaContainer.toPartiQlMetaContainer(): P partiQlMeta } - return org.partiql.lang.ast.metaContainerOf(nonLocationMetas) + return metaContainerOf(nonLocationMetas) } private class StatementTransformer(val ion: IonSystem) { - fun transform(stmt: Statement): ExprNode = + fun transform(stmt: PartiqlAst.Statement): ExprNode = when (stmt) { - is Statement.Query -> stmt.toExprNode() - is Statement.Dml -> stmt.toExprNode() - is Statement.Ddl -> stmt.toExprNode() - is Statement.Exec -> stmt.toExprNode() + is PartiqlAst.Statement.Query -> stmt.toExprNode() + is PartiqlAst.Statement.Dml -> stmt.toExprNode() + is PartiqlAst.Statement.Ddl -> stmt.toExprNode() + is PartiqlAst.Statement.Exec -> stmt.toExprNode() } - fun transform(stmt: Expr): ExprNode = + fun transform(stmt: PartiqlAst.Expr): ExprNode = stmt.toExprNode() - private fun Statement.Query.toExprNode(): ExprNode { + private fun PartiqlAst.Statement.Query.toExprNode(): ExprNode { return this.expr.toExprNode() } - private fun List.toExprNodeList(): List = + private fun List.toExprNodeList(): List = this.map { it.toExprNode() } - private fun Expr.toExprNode(): ExprNode { + private fun PartiqlAst.Expr.toExprNode(): ExprNode { checkThreadInterrupted() val metas = this.metas.toPartiQlMetaContainer() return when (this) { - is Expr.Missing -> LiteralMissing(metas) + is PartiqlAst.Expr.Missing -> LiteralMissing(metas) // https://github.com/amzn/ion-element-kotlin/issues/35, .asAnyElement() is unfortunately needed for now - is Expr.Lit -> Literal(value.asAnyElement().toIonValue(ion), metas) - is Expr.Id -> VariableReference(name.text, case.toCaseSensitivity(), qualifier.toScopeQualifier(), metas) - is Expr.Parameter -> Parameter(index.value.toInt(), metas) - is Expr.Not -> NAry(NAryOp.NOT, listOf(expr.toExprNode()), metas) - is Expr.Pos -> expr.toExprNode() - is Expr.Neg -> NAry(NAryOp.SUB, listOf(expr.toExprNode()), metas) - is Expr.Plus -> NAry(NAryOp.ADD, operands.toExprNodeList(), metas) - is Expr.Minus -> NAry(NAryOp.SUB, operands.toExprNodeList(), metas) - is Expr.Times -> NAry(NAryOp.MUL, operands.toExprNodeList(), metas) - is Expr.Divide -> NAry(NAryOp.DIV, operands.toExprNodeList(), metas) - is Expr.Modulo -> NAry(NAryOp.MOD, operands.toExprNodeList(), metas) - is Expr.Concat -> NAry(NAryOp.STRING_CONCAT, operands.toExprNodeList(), metas) - is Expr.And -> NAry(NAryOp.AND, operands.toExprNodeList(), metas) - is Expr.Or -> NAry(NAryOp.OR, operands.toExprNodeList(), metas) - is Expr.Eq -> NAry(NAryOp.EQ, operands.toExprNodeList(), metas) - is Expr.Ne -> NAry(NAryOp.NE, operands.toExprNodeList(), metas) - is Expr.Gt -> NAry(NAryOp.GT, 
operands.toExprNodeList(), metas) - is Expr.Gte -> NAry(NAryOp.GTE, operands.toExprNodeList(), metas) - is Expr.Lt -> NAry(NAryOp.LT, operands.toExprNodeList(), metas) - is Expr.Lte -> NAry(NAryOp.LTE, operands.toExprNodeList(), metas) - - is Expr.Union -> + is PartiqlAst.Expr.Lit -> Literal(value.asAnyElement().toIonValue(ion), metas) + is PartiqlAst.Expr.Id -> VariableReference(name.text, case.toCaseSensitivity(), qualifier.toScopeQualifier(), metas) + is PartiqlAst.Expr.Parameter -> Parameter(index.value.toInt(), metas) + is PartiqlAst.Expr.Not -> NAry(NAryOp.NOT, listOf(expr.toExprNode()), metas) + is PartiqlAst.Expr.Pos -> expr.toExprNode() + is PartiqlAst.Expr.Neg -> NAry(NAryOp.SUB, listOf(expr.toExprNode()), metas) + is PartiqlAst.Expr.Plus -> NAry(NAryOp.ADD, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Minus -> NAry(NAryOp.SUB, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Times -> NAry(NAryOp.MUL, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Divide -> NAry(NAryOp.DIV, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Modulo -> NAry(NAryOp.MOD, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Concat -> NAry(NAryOp.STRING_CONCAT, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.And -> NAry(NAryOp.AND, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Or -> NAry(NAryOp.OR, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Eq -> NAry(NAryOp.EQ, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Ne -> NAry(NAryOp.NE, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Gt -> NAry(NAryOp.GT, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Gte -> NAry(NAryOp.GTE, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Lt -> NAry(NAryOp.LT, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.Lte -> NAry(NAryOp.LTE, operands.toExprNodeList(), metas) + + is PartiqlAst.Expr.Union -> NAry( - when(setq) { - is SetQuantifier.Distinct -> NAryOp.UNION - is SetQuantifier.All -> NAryOp.UNION_ALL - }, - operands.toExprNodeList(), - metas) - is Expr.Intersect -> + when(setq) { + is PartiqlAst.SetQuantifier.Distinct -> NAryOp.UNION + is PartiqlAst.SetQuantifier.All -> NAryOp.UNION_ALL + }, + operands.toExprNodeList(), + metas) + is PartiqlAst.Expr.Intersect -> NAry( - when(setq) { - is SetQuantifier.Distinct -> NAryOp.INTERSECT - is SetQuantifier.All -> NAryOp.INTERSECT_ALL - }, - operands.toExprNodeList(), - metas) - is Expr.Except -> + when(setq) { + is PartiqlAst.SetQuantifier.Distinct -> NAryOp.INTERSECT + is PartiqlAst.SetQuantifier.All -> NAryOp.INTERSECT_ALL + }, + operands.toExprNodeList(), + metas) + is PartiqlAst.Expr.Except -> NAry( - when(setq) { - is SetQuantifier.Distinct -> NAryOp.EXCEPT - is SetQuantifier.All -> NAryOp.EXCEPT_ALL - }, - operands.toExprNodeList(), - metas) + when(setq) { + is PartiqlAst.SetQuantifier.Distinct -> NAryOp.EXCEPT + is PartiqlAst.SetQuantifier.All -> NAryOp.EXCEPT_ALL + }, + operands.toExprNodeList(), + metas) - is Expr.Like -> NAry(NAryOp.LIKE, listOfNotNull(value.toExprNode(), pattern.toExprNode(), escape?.toExprNode()), metas) - is Expr.Between -> NAry(NAryOp.BETWEEN, listOf(value.toExprNode(), from.toExprNode(), to.toExprNode()), metas) - is Expr.InCollection -> NAry(NAryOp.IN, operands.toExprNodeList(), metas) - is Expr.IsType -> Typed(TypedOp.IS, value.toExprNode(), type.toExprNodeType(), metas) - is Expr.Cast -> Typed(TypedOp.CAST, value.toExprNode(), asType.toExprNodeType(), metas) + is PartiqlAst.Expr.Like -> NAry(NAryOp.LIKE, listOfNotNull(value.toExprNode(), 
pattern.toExprNode(), escape?.toExprNode()), metas) + is PartiqlAst.Expr.Between -> NAry(NAryOp.BETWEEN, listOf(value.toExprNode(), from.toExprNode(), to.toExprNode()), metas) + is PartiqlAst.Expr.InCollection -> NAry(NAryOp.IN, operands.toExprNodeList(), metas) + is PartiqlAst.Expr.IsType -> Typed(TypedOp.IS, value.toExprNode(), type.toExprNodeType(), metas) + is PartiqlAst.Expr.Cast -> Typed(TypedOp.CAST, value.toExprNode(), asType.toExprNodeType(), metas) - is Expr.SimpleCase -> + is PartiqlAst.Expr.SimpleCase -> SimpleCase( - expr.toExprNode(), - cases.pairs.map { SimpleCaseWhen(it.first.toExprNode(), it.second.toExprNode()) }, - default?.toExprNode(), - metas) - is Expr.SearchedCase -> + expr.toExprNode(), + cases.pairs.map { SimpleCaseWhen(it.first.toExprNode(), it.second.toExprNode()) }, + default?.toExprNode(), + metas) + is PartiqlAst.Expr.SearchedCase -> SearchedCase( - cases.pairs.map { SearchedCaseWhen(it.first.toExprNode(), it.second.toExprNode()) }, - this.default?.toExprNode(), - metas) - is Expr.Struct -> Struct(this.fields.map { StructField(it.first.toExprNode(), it.second.toExprNode()) }, metas) - is Expr.Bag -> Seq(SeqType.BAG, values.toExprNodeList(), metas) - is Expr.List -> Seq(SeqType.LIST, values.toExprNodeList(), metas) - is Expr.Sexp -> Seq(SeqType.SEXP, values.toExprNodeList(), metas) - is Expr.Path -> + cases.pairs.map { SearchedCaseWhen(it.first.toExprNode(), it.second.toExprNode()) }, + this.default?.toExprNode(), + metas) + is PartiqlAst.Expr.Struct -> Struct(this.fields.map { StructField(it.first.toExprNode(), it.second.toExprNode()) }, metas) + is PartiqlAst.Expr.Bag -> Seq(SeqType.BAG, values.toExprNodeList(), metas) + is PartiqlAst.Expr.List -> Seq(SeqType.LIST, values.toExprNodeList(), metas) + is PartiqlAst.Expr.Sexp -> Seq(SeqType.SEXP, values.toExprNodeList(), metas) + is PartiqlAst.Expr.Path -> Path( - root.toExprNode(), - steps.map { - val componentMetas = it.metas.toPartiQlMetaContainer() - when (it) { - is PathStep.PathExpr -> - PathComponentExpr( - it.index.toExprNode(), - it.case.toCaseSensitivity()) - is PathStep.PathUnpivot -> PathComponentUnpivot(componentMetas) - is PathStep.PathWildcard -> PathComponentWildcard(componentMetas) - } - }, - metas) - is Expr.Call -> + root.toExprNode(), + steps.map { + val componentMetas = it.metas.toPartiQlMetaContainer() + when (it) { + is PartiqlAst.PathStep.PathExpr -> + PathComponentExpr( + it.index.toExprNode(), + it.case.toCaseSensitivity()) + is PartiqlAst.PathStep.PathUnpivot -> PathComponentUnpivot(componentMetas) + is PartiqlAst.PathStep.PathWildcard -> PathComponentWildcard(componentMetas) + } + }, + metas) + is PartiqlAst.Expr.Call -> NAry( - NAryOp.CALL, - listOf( - VariableReference( - funcName.text, - org.partiql.lang.ast.CaseSensitivity.INSENSITIVE, - org.partiql.lang.ast.ScopeQualifier.UNQUALIFIED, - emptyMetaContainer) - ) + args.map { it.toExprNode() }, - metas) - is Expr.CallAgg -> + NAryOp.CALL, + listOf( + VariableReference( + funcName.text, + CaseSensitivity.INSENSITIVE, + ScopeQualifier.UNQUALIFIED, + emptyMetaContainer) + ) + args.map { it.toExprNode() }, + metas) + is PartiqlAst.Expr.CallAgg -> CallAgg( - VariableReference( - funcName.text, - org.partiql.lang.ast.CaseSensitivity.INSENSITIVE, - org.partiql.lang.ast.ScopeQualifier.UNQUALIFIED, - funcName.metas.toPartiQlMetaContainer()), - setq.toSetQuantifier(), - arg.toExprNode(), - metas) - is Expr.Select -> + VariableReference( + funcName.text, + CaseSensitivity.INSENSITIVE, + ScopeQualifier.UNQUALIFIED, + 
funcName.metas.toPartiQlMetaContainer()), + setq.toSetQuantifier(), + arg.toExprNode(), + metas) + is PartiqlAst.Expr.Select -> Select( - setQuantifier = setq?.toSetQuantifier() ?: ExprNodeSetQuantifier.ALL, - projection = project.toSelectProjection(), - from = from.toFromSource(), - fromLet = fromLet?.toLetSource(), - where = where?.toExprNode(), - groupBy = group?.toGroupBy(), - having = having?.toExprNode(), - orderBy = order?.toOrderBy(), - limit = limit?.toExprNode(), - offset = offset?.toExprNode(), - metas = metas - ) - is Expr.Date -> + setQuantifier = setq?.toSetQuantifier() ?: SetQuantifier.ALL, + projection = project.toSelectProjection(), + from = from.toFromSource(), + fromLet = fromLet?.toLetSource(), + where = where?.toExprNode(), + groupBy = group?.toGroupBy(), + having = having?.toExprNode(), + orderBy = order?.toOrderBy(), + limit = limit?.toExprNode(), + offset = offset?.toExprNode(), + metas = metas + ) + is PartiqlAst.Expr.Date -> DateTimeType.Date(year.value.toInt(), month.value.toInt(), day.value.toInt(), metas) - is Expr.LitTime -> + is PartiqlAst.Expr.LitTime -> DateTimeType.Time( - value.hour.value.toInt(), - value.minute.value.toInt(), - value.second.value.toInt(), - value.nano.value.toInt(), - value.precision.value.toInt(), - value.withTimeZone.value, - value.tzMinutes?.value?.toInt(), - metas + value.hour.value.toInt(), + value.minute.value.toInt(), + value.second.value.toInt(), + value.nano.value.toInt(), + value.precision.value.toInt(), + value.withTimeZone.value, + value.tzMinutes?.value?.toInt(), + metas ) } } - private fun Projection.toSelectProjection(): SelectProjection { + private fun PartiqlAst.Projection.toSelectProjection(): SelectProjection { val metas = this.metas.toPartiQlMetaContainer() return when (this) { - is Projection.ProjectStar -> SelectProjectionList(listOf(SelectListItemStar(metas))) - is Projection.ProjectValue -> SelectProjectionValue(this.value.toExprNode()) - is Projection.ProjectPivot -> SelectProjectionPivot(this.value.toExprNode(), this.key.toExprNode()) - is Projection.ProjectList -> + is PartiqlAst.Projection.ProjectStar -> SelectProjectionList(listOf(SelectListItemStar(metas))) + is PartiqlAst.Projection.ProjectValue -> SelectProjectionValue(this.value.toExprNode()) + is PartiqlAst.Projection.ProjectPivot -> SelectProjectionPivot(this.value.toExprNode(), this.key.toExprNode()) + is PartiqlAst.Projection.ProjectList -> SelectProjectionList( this.projectItems.map { when (it) { - is ProjectItem.ProjectAll -> SelectListItemProjectAll(it.expr.toExprNode()) - is ProjectItem.ProjectExpr -> + is PartiqlAst.ProjectItem.ProjectAll -> SelectListItemProjectAll(it.expr.toExprNode()) + is PartiqlAst.ProjectItem.ProjectExpr -> SelectListItemExpr( it.expr.toExprNode(), it.asAlias?.toSymbolicName()) @@ -257,15 +245,15 @@ private class StatementTransformer(val ion: IonSystem) { } } - private fun JoinType.toJoinOp(): JoinOp = + private fun PartiqlAst.JoinType.toJoinOp(): JoinOp = when (this) { - is JoinType.Inner -> JoinOp.INNER - is JoinType.Left -> JoinOp.LEFT - is JoinType.Right -> JoinOp.RIGHT - is JoinType.Full -> JoinOp.OUTER + is PartiqlAst.JoinType.Inner -> JoinOp.INNER + is PartiqlAst.JoinType.Left -> JoinOp.LEFT + is PartiqlAst.JoinType.Right -> JoinOp.RIGHT + is PartiqlAst.JoinType.Full -> JoinOp.OUTER } - private fun Let.toLetSource(): LetSource { + private fun PartiqlAst.Let.toLetSource(): LetSource { return LetSource( this.letBindings.map { LetBinding( @@ -291,7 +279,7 @@ private class StatementTransformer(val ion: IonSystem) { else 
-> OrderingSpec.ASC } - private fun PartiqlAst.GroupBy.toGroupBy(): org.partiql.lang.ast.GroupBy = + private fun PartiqlAst.GroupBy.toGroupBy(): GroupBy = GroupBy( grouping = strategy.toGroupingStrategy(), groupByItems = keyList.keys.map { @@ -301,60 +289,60 @@ private class StatementTransformer(val ion: IonSystem) { }, groupName = groupAsAlias?.toSymbolicName()) - private fun GroupingStrategy.toGroupingStrategy(): org.partiql.lang.ast.GroupingStrategy = + private fun PartiqlAst.GroupingStrategy.toGroupingStrategy(): GroupingStrategy = when(this) { - is GroupingStrategy.GroupFull-> org.partiql.lang.ast.GroupingStrategy.FULL - is GroupingStrategy.GroupPartial -> org.partiql.lang.ast.GroupingStrategy.PARTIAL + is PartiqlAst.GroupingStrategy.GroupFull-> GroupingStrategy.FULL + is PartiqlAst.GroupingStrategy.GroupPartial -> GroupingStrategy.PARTIAL } - private fun Type.toExprNodeType(): DataType { + private fun PartiqlAst.Type.toExprNodeType(): DataType { val metas = this.metas.toPartiQlMetaContainer() return when (this) { - is Type.NullType -> DataType(SqlDataType.NULL, listOf(), metas) - is Type.MissingType -> DataType(SqlDataType.MISSING, listOf(), metas) - is Type.BooleanType -> DataType(SqlDataType.BOOLEAN, listOf(), metas) - is Type.IntegerType -> DataType(SqlDataType.INTEGER, listOf(), metas) - is Type.SmallintType -> DataType(SqlDataType.SMALLINT, listOf(), metas) - is Type.FloatType -> DataType(SqlDataType.FLOAT, listOfNotNull(precision?.value), metas) - is Type.RealType -> DataType(SqlDataType.REAL, listOf(), metas) - is Type.DoublePrecisionType -> DataType(SqlDataType.DOUBLE_PRECISION, listOf(), metas) - is Type.DecimalType -> DataType(SqlDataType.DECIMAL, listOfNotNull(precision?.value, scale?.value), metas) - is Type.NumericType -> DataType(SqlDataType.NUMERIC, listOfNotNull(precision?.value, scale?.value), metas) - is Type.TimestampType -> DataType(SqlDataType.TIMESTAMP, listOf(), metas) - is Type.CharacterType -> DataType(SqlDataType.CHARACTER, listOfNotNull(length?.value), metas) - is Type.CharacterVaryingType -> DataType(SqlDataType.CHARACTER_VARYING, listOfNotNull(length?.value), metas) - is Type.StringType -> DataType(SqlDataType.STRING, listOf(), metas) - is Type.SymbolType -> DataType(SqlDataType.SYMBOL, listOf(), metas) - is Type.BlobType -> DataType(SqlDataType.BLOB, listOf(), metas) - is Type.ClobType -> DataType(SqlDataType.CLOB, listOf(), metas) - is Type.StructType -> DataType(SqlDataType.STRUCT, listOf(), metas) - is Type.TupleType -> DataType(SqlDataType.TUPLE, listOf(), metas) - is Type.ListType -> DataType(SqlDataType.LIST, listOf(), metas) - is Type.SexpType -> DataType(SqlDataType.SEXP, listOf(), metas) - is Type.BagType -> DataType(SqlDataType.BAG, listOf(), metas) - is Type.DateType -> DataType(SqlDataType.DATE, listOf(), metas) - is Type.TimeType -> DataType(SqlDataType.TIME, listOfNotNull(precision?.value), metas) - is Type.TimeWithTimeZoneType -> DataType(SqlDataType.TIME_WITH_TIME_ZONE, listOfNotNull(precision?.value), metas) + is PartiqlAst.Type.NullType -> DataType(SqlDataType.NULL, listOf(), metas) + is PartiqlAst.Type.MissingType -> DataType(SqlDataType.MISSING, listOf(), metas) + is PartiqlAst.Type.BooleanType -> DataType(SqlDataType.BOOLEAN, listOf(), metas) + is PartiqlAst.Type.IntegerType -> DataType(SqlDataType.INTEGER, listOf(), metas) + is PartiqlAst.Type.SmallintType -> DataType(SqlDataType.SMALLINT, listOf(), metas) + is PartiqlAst.Type.FloatType -> DataType(SqlDataType.FLOAT, listOfNotNull(precision?.value), metas) + is 
PartiqlAst.Type.RealType -> DataType(SqlDataType.REAL, listOf(), metas) + is PartiqlAst.Type.DoublePrecisionType -> DataType(SqlDataType.DOUBLE_PRECISION, listOf(), metas) + is PartiqlAst.Type.DecimalType -> DataType(SqlDataType.DECIMAL, listOfNotNull(precision?.value, scale?.value), metas) + is PartiqlAst.Type.NumericType -> DataType(SqlDataType.NUMERIC, listOfNotNull(precision?.value, scale?.value), metas) + is PartiqlAst.Type.TimestampType -> DataType(SqlDataType.TIMESTAMP, listOf(), metas) + is PartiqlAst.Type.CharacterType -> DataType(SqlDataType.CHARACTER, listOfNotNull(length?.value), metas) + is PartiqlAst.Type.CharacterVaryingType -> DataType(SqlDataType.CHARACTER_VARYING, listOfNotNull(length?.value), metas) + is PartiqlAst.Type.StringType -> DataType(SqlDataType.STRING, listOf(), metas) + is PartiqlAst.Type.SymbolType -> DataType(SqlDataType.SYMBOL, listOf(), metas) + is PartiqlAst.Type.BlobType -> DataType(SqlDataType.BLOB, listOf(), metas) + is PartiqlAst.Type.ClobType -> DataType(SqlDataType.CLOB, listOf(), metas) + is PartiqlAst.Type.StructType -> DataType(SqlDataType.STRUCT, listOf(), metas) + is PartiqlAst.Type.TupleType -> DataType(SqlDataType.TUPLE, listOf(), metas) + is PartiqlAst.Type.ListType -> DataType(SqlDataType.LIST, listOf(), metas) + is PartiqlAst.Type.SexpType -> DataType(SqlDataType.SEXP, listOf(), metas) + is PartiqlAst.Type.BagType -> DataType(SqlDataType.BAG, listOf(), metas) + is PartiqlAst.Type.DateType -> DataType(SqlDataType.DATE, listOf(), metas) + is PartiqlAst.Type.TimeType -> DataType(SqlDataType.TIME, listOfNotNull(precision?.value), metas) + is PartiqlAst.Type.TimeWithTimeZoneType -> DataType(SqlDataType.TIME_WITH_TIME_ZONE, listOfNotNull(precision?.value), metas) } } - private fun PartiqlAst.SetQuantifier.toSetQuantifier(): ExprNodeSetQuantifier = + private fun PartiqlAst.SetQuantifier.toSetQuantifier(): SetQuantifier = when (this) { - is PartiqlAst.SetQuantifier.All -> ExprNodeSetQuantifier.ALL - is PartiqlAst.SetQuantifier.Distinct -> ExprNodeSetQuantifier.DISTINCT + is PartiqlAst.SetQuantifier.All -> SetQuantifier.ALL + is PartiqlAst.SetQuantifier.Distinct -> SetQuantifier.DISTINCT } - private fun ScopeQualifier.toScopeQualifier(): org.partiql.lang.ast.ScopeQualifier = + private fun PartiqlAst.ScopeQualifier.toScopeQualifier(): ScopeQualifier = when (this) { - is ScopeQualifier.Unqualified -> org.partiql.lang.ast.ScopeQualifier.UNQUALIFIED - is ScopeQualifier.LocalsFirst -> org.partiql.lang.ast.ScopeQualifier.LEXICAL + is PartiqlAst.ScopeQualifier.Unqualified -> ScopeQualifier.UNQUALIFIED + is PartiqlAst.ScopeQualifier.LocalsFirst -> ScopeQualifier.LEXICAL } - private fun CaseSensitivity.toCaseSensitivity(): org.partiql.lang.ast.CaseSensitivity = + private fun PartiqlAst.CaseSensitivity.toCaseSensitivity(): CaseSensitivity = when (this) { - is CaseSensitivity.CaseSensitive -> org.partiql.lang.ast.CaseSensitivity.SENSITIVE - is CaseSensitivity.CaseInsensitive -> org.partiql.lang.ast.CaseSensitivity.INSENSITIVE + is PartiqlAst.CaseSensitivity.CaseSensitive -> CaseSensitivity.SENSITIVE + is PartiqlAst.CaseSensitivity.CaseInsensitive -> CaseSensitivity.INSENSITIVE } private fun PartiqlAst.OnConflict.toOnConflictNode(): OnConflict { @@ -416,20 +404,20 @@ private class StatementTransformer(val ion: IonSystem) { } } - private fun ReturningMapping.toExprNodeReturningMapping(): ExprNodeReturningMapping = + private fun PartiqlAst.ReturningMapping.toExprNodeReturningMapping(): ReturningMapping = when(this) { - is ReturningMapping.ModifiedOld -> 
ExprNodeReturningMapping.MODIFIED_OLD - is ReturningMapping.ModifiedNew -> ExprNodeReturningMapping.MODIFIED_NEW - is ReturningMapping.AllOld -> ExprNodeReturningMapping.ALL_OLD - is ReturningMapping.AllNew -> ExprNodeReturningMapping.ALL_NEW + is PartiqlAst.ReturningMapping.ModifiedOld -> ReturningMapping.MODIFIED_OLD + is PartiqlAst.ReturningMapping.ModifiedNew -> ReturningMapping.MODIFIED_NEW + is PartiqlAst.ReturningMapping.AllOld -> ReturningMapping.ALL_OLD + is PartiqlAst.ReturningMapping.AllNew -> ReturningMapping.ALL_NEW } - private fun Statement.Ddl.toExprNode(): ExprNode { + private fun PartiqlAst.Statement.Ddl.toExprNode(): ExprNode { val op = this.op val metas = this.metas.toPartiQlMetaContainer() return when(op) { - is DdlOp.CreateTable -> CreateTable(op.tableName.text, metas) - is DdlOp.DropTable -> + is PartiqlAst.DdlOp.CreateTable -> CreateTable(op.tableName.text, metas) + is PartiqlAst.DdlOp.DropTable -> DropTable( tableId = Identifier( id = op.tableName.name.text, @@ -437,7 +425,7 @@ private class StatementTransformer(val ion: IonSystem) { metas = emptyMetaContainer ), metas = metas) - is DdlOp.CreateIndex -> + is PartiqlAst.DdlOp.CreateIndex -> CreateIndex( tableId = Identifier( id = op.indexName.name.text, @@ -446,7 +434,7 @@ private class StatementTransformer(val ion: IonSystem) { ), keys = op.fields.map { it.toExprNode() }, metas = metas) - is DdlOp.DropIndex -> + is PartiqlAst.DdlOp.DropIndex -> DropIndex( tableId = Identifier( id = op.table.name.text, @@ -462,7 +450,7 @@ private class StatementTransformer(val ion: IonSystem) { } } - private fun Statement.Exec.toExprNode(): ExprNode { + private fun PartiqlAst.Statement.Exec.toExprNode(): ExprNode { return Exec(procedureName.toSymbolicName(), this.args.toExprNodeList(), metas.toPartiQlMetaContainer()) } } diff --git a/lang/src/org/partiql/lang/ast/StaticTypeMeta.kt b/lang/src/org/partiql/lang/ast/StaticTypeMeta.kt index 4d89f8b5d1..32d84a9ce9 100644 --- a/lang/src/org/partiql/lang/ast/StaticTypeMeta.kt +++ b/lang/src/org/partiql/lang/ast/StaticTypeMeta.kt @@ -1,10 +1,12 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.eval.ExprValue -import org.partiql.lang.eval.ExprValueType -import org.partiql.lang.types.* -import org.partiql.lang.util.* +import com.amazon.ion.IonSexp +import com.amazon.ion.IonWriter +import org.partiql.lang.types.StaticType +import org.partiql.lang.util.IonWriterContext +import org.partiql.lang.util.asIonStruct +import org.partiql.lang.util.field +import org.partiql.lang.util.stringValue /** * Represents a static type for an AST element. diff --git a/lang/src/org/partiql/lang/ast/Util.kt b/lang/src/org/partiql/lang/ast/Util.kt index 8fdcf0806b..aec028b67d 100644 --- a/lang/src/org/partiql/lang/ast/Util.kt +++ b/lang/src/org/partiql/lang/ast/Util.kt @@ -15,9 +15,9 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.ast.SetQuantifier.* -import org.partiql.lang.errors.* +import com.amazon.ion.IonSystem +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap fun PropertyValueMap.addSourceLocation(metas: MetaContainer): PropertyValueMap { (metas.find(SourceLocationMeta.TAG) as? 
SourceLocationMeta)?.let { @@ -44,7 +44,7 @@ fun createCountStar(ion: IonSystem, metas: MetaContainer): CallAgg { case = CaseSensitivity.INSENSITIVE, scopeQualifier = ScopeQualifier.UNQUALIFIED, metas = srcLocationMetaOnly), - setQuantifier = ALL, + setQuantifier = SetQuantifier.ALL, arg = Literal(ion.newInt(1), srcLocationMetaOnly), metas = metas.add(IsCountStarMeta.instance) ) diff --git a/lang/src/org/partiql/lang/ast/ast.kt b/lang/src/org/partiql/lang/ast/ast.kt index dd4540eae1..03ffbbc083 100644 --- a/lang/src/org/partiql/lang/ast/ast.kt +++ b/lang/src/org/partiql/lang/ast/ast.kt @@ -14,9 +14,11 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.util.* -import java.util.* +import com.amazon.ion.IonType +import com.amazon.ion.IonValue +import org.partiql.lang.util.interruptibleMap +import org.partiql.lang.util.stringValue +import java.util.Arrays /** * Base type for all AST nodes. @@ -139,8 +141,8 @@ sealed class ExprNode : AstNode(), HasMetas { /** Represents a literal value. */ data class Literal( - val ionValue: IonValue, - override val metas: MetaContainer + val ionValue: IonValue, + override val metas: MetaContainer ) : ExprNode() { init { ionValue.clone().makeReadOnly() diff --git a/lang/src/org/partiql/lang/ast/meta.kt b/lang/src/org/partiql/lang/ast/meta.kt index 7b7ce01c4f..56b8384ca3 100644 --- a/lang/src/org/partiql/lang/ast/meta.kt +++ b/lang/src/org/partiql/lang/ast/meta.kt @@ -14,9 +14,10 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import org.partiql.lang.util.* -import java.util.* +import com.amazon.ion.IonWriter +import org.partiql.lang.util.IonWriterContext +import java.util.Arrays +import java.util.TreeMap /** * The [Meta] interface is implemented by classes that provide an object mapping view to AST meta nodes. 
diff --git a/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt b/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt index 79dca09be7..dab0a08455 100644 --- a/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt +++ b/lang/src/org/partiql/lang/ast/passes/AstRewriterBase.kt @@ -14,7 +14,71 @@ package org.partiql.lang.ast.passes -import org.partiql.lang.ast.* +import org.partiql.lang.ast.Assignment +import org.partiql.lang.ast.AssignmentOp +import org.partiql.lang.ast.CallAgg +import org.partiql.lang.ast.CreateIndex +import org.partiql.lang.ast.CreateTable +import org.partiql.lang.ast.DataManipulation +import org.partiql.lang.ast.DataManipulationOperation +import org.partiql.lang.ast.DataType +import org.partiql.lang.ast.DateTimeType +import org.partiql.lang.ast.DeleteOp +import org.partiql.lang.ast.DmlOpList +import org.partiql.lang.ast.DropIndex +import org.partiql.lang.ast.DropTable +import org.partiql.lang.ast.Exec +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.FromSource +import org.partiql.lang.ast.FromSourceExpr +import org.partiql.lang.ast.FromSourceJoin +import org.partiql.lang.ast.FromSourceLet +import org.partiql.lang.ast.FromSourceUnpivot +import org.partiql.lang.ast.GroupBy +import org.partiql.lang.ast.GroupByItem +import org.partiql.lang.ast.HasMetas +import org.partiql.lang.ast.Identifier +import org.partiql.lang.ast.InsertOp +import org.partiql.lang.ast.InsertValueOp +import org.partiql.lang.ast.LetBinding +import org.partiql.lang.ast.LetSource +import org.partiql.lang.ast.LetVariables +import org.partiql.lang.ast.Literal +import org.partiql.lang.ast.LiteralMissing +import org.partiql.lang.ast.MetaContainer +import org.partiql.lang.ast.NAry +import org.partiql.lang.ast.OnConflict +import org.partiql.lang.ast.OrderBy +import org.partiql.lang.ast.Parameter +import org.partiql.lang.ast.Path +import org.partiql.lang.ast.PathComponent +import org.partiql.lang.ast.PathComponentExpr +import org.partiql.lang.ast.PathComponentUnpivot +import org.partiql.lang.ast.PathComponentWildcard +import org.partiql.lang.ast.RemoveOp +import org.partiql.lang.ast.ReturningElem +import org.partiql.lang.ast.ReturningExpr +import org.partiql.lang.ast.SearchedCase +import org.partiql.lang.ast.SearchedCaseWhen +import org.partiql.lang.ast.Select +import org.partiql.lang.ast.SelectListItem +import org.partiql.lang.ast.SelectListItemExpr +import org.partiql.lang.ast.SelectListItemProjectAll +import org.partiql.lang.ast.SelectListItemStar +import org.partiql.lang.ast.SelectProjection +import org.partiql.lang.ast.SelectProjectionList +import org.partiql.lang.ast.SelectProjectionPivot +import org.partiql.lang.ast.SelectProjectionValue +import org.partiql.lang.ast.Seq +import org.partiql.lang.ast.SeqType +import org.partiql.lang.ast.SimpleCase +import org.partiql.lang.ast.SimpleCaseWhen +import org.partiql.lang.ast.SortSpec +import org.partiql.lang.ast.Struct +import org.partiql.lang.ast.StructField +import org.partiql.lang.ast.SymbolicName +import org.partiql.lang.ast.Typed +import org.partiql.lang.ast.VariableReference import org.partiql.lang.util.checkThreadInterrupted /** @@ -35,25 +99,25 @@ open class AstRewriterBase : AstRewriter { override fun rewriteExprNode(node: ExprNode): ExprNode { checkThreadInterrupted() return when (node) { - is Literal -> rewriteLiteral(node) - is LiteralMissing -> rewriteLiteralMissing(node) + is Literal -> rewriteLiteral(node) + is LiteralMissing -> rewriteLiteralMissing(node) is VariableReference -> rewriteVariableReference(node) - is NAry 
-> rewriteNAry(node) - is CallAgg -> rewriteCallAgg(node) - is Typed -> rewriteTyped(node) - is Path -> rewritePath(node) - is SimpleCase -> rewriteSimpleCase(node) - is SearchedCase -> rewriteSearchedCase(node) - is Struct -> rewriteStruct(node) - is Seq -> rewriteSeq(node) - is Select -> rewriteSelect(node) - is Parameter -> rewriteParameter(node) - is DataManipulation -> rewriteDataManipulation(node) - is CreateTable -> rewriteCreateTable(node) - is CreateIndex -> rewriteCreateIndex(node) - is DropTable -> rewriteDropTable(node) - is DropIndex -> rewriteDropIndex(node) - is Exec -> rewriteExec(node) + is NAry -> rewriteNAry(node) + is CallAgg -> rewriteCallAgg(node) + is Typed -> rewriteTyped(node) + is Path -> rewritePath(node) + is SimpleCase -> rewriteSimpleCase(node) + is SearchedCase -> rewriteSearchedCase(node) + is Struct -> rewriteStruct(node) + is Seq -> rewriteSeq(node) + is Select -> rewriteSelect(node) + is Parameter -> rewriteParameter(node) + is DataManipulation -> rewriteDataManipulation(node) + is CreateTable -> rewriteCreateTable(node) + is CreateIndex -> rewriteCreateIndex(node) + is DropTable -> rewriteDropTable(node) + is DropIndex -> rewriteDropIndex(node) + is Exec -> rewriteExec(node) is DateTimeType.Date -> rewriteDate(node) is DateTimeType.Time -> rewriteTime(node) } @@ -195,7 +259,7 @@ open class AstRewriterBase : AstRewriter { open fun rewriteSelectProjection(projection: SelectProjection): SelectProjection = when (projection) { - is SelectProjectionList -> rewriteSelectProjectionList(projection) + is SelectProjectionList -> rewriteSelectProjectionList(projection) is SelectProjectionValue -> rewriteSelectProjectionValue(projection) is SelectProjectionPivot -> rewriteSelectProjectionPivot(projection) } @@ -215,8 +279,8 @@ open class AstRewriterBase : AstRewriter { open fun rewriteSelectListItem(item: SelectListItem): SelectListItem = when(item) { - is SelectListItemStar -> rewriteSelectListItemStar(item) - is SelectListItemExpr -> rewriteSelectListItemExpr(item) + is SelectListItemStar -> rewriteSelectListItemStar(item) + is SelectListItemExpr -> rewriteSelectListItemExpr(item) is SelectListItemProjectAll -> rewriteSelectListItemProjectAll(item) } @@ -236,7 +300,7 @@ open class AstRewriterBase : AstRewriter { when(pathComponent) { is PathComponentUnpivot -> rewritePathComponentUnpivot(pathComponent) is PathComponentWildcard -> rewritePathComponentWildcard(pathComponent) - is PathComponentExpr -> rewritePathComponentExpr(pathComponent) + is PathComponentExpr -> rewritePathComponentExpr(pathComponent) } open fun rewritePathComponentUnpivot(pathComponent: PathComponentUnpivot): PathComponent = @@ -251,12 +315,12 @@ open class AstRewriterBase : AstRewriter { open fun rewriteFromSource(fromSource: FromSource): FromSource = when(fromSource) { is FromSourceJoin -> rewriteFromSourceJoin(fromSource) - is FromSourceLet -> rewriteFromSourceLet(fromSource) + is FromSourceLet -> rewriteFromSourceLet(fromSource) } open fun rewriteFromSourceLet(fromSourceLet: FromSourceLet): FromSourceLet = when(fromSourceLet) { - is FromSourceExpr -> rewriteFromSourceExpr(fromSourceLet) + is FromSourceExpr -> rewriteFromSourceExpr(fromSourceLet) is FromSourceUnpivot -> rewriteFromSourceUnpivot(fromSourceLet) } @@ -387,11 +451,11 @@ open class AstRewriterBase : AstRewriter { open fun rewriteDataManipulationOperation(node: DataManipulationOperation): DataManipulationOperation = when(node) { - is InsertOp -> rewriteDataManipulationOperationInsertOp(node) + is InsertOp -> 
rewriteDataManipulationOperationInsertOp(node) is InsertValueOp -> rewriteDataManipulationOperationInsertValueOp(node) - is AssignmentOp -> rewriteDataManipulationOperationAssignmentOp(node) - is RemoveOp -> rewriteDataManipulationOperationRemoveOp(node) - is DeleteOp -> rewriteDataManipulationOperationDeleteOp() + is AssignmentOp -> rewriteDataManipulationOperationAssignmentOp(node) + is RemoveOp -> rewriteDataManipulationOperationRemoveOp(node) + is DeleteOp -> rewriteDataManipulationOperationDeleteOp() } open fun rewriteDataManipulationOperationInsertOp(node: InsertOp): DataManipulationOperation = diff --git a/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt b/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt index d4556656d3..e5d2f19645 100644 --- a/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt +++ b/lang/src/org/partiql/lang/ast/passes/AstVisitor.kt @@ -14,7 +14,14 @@ package org.partiql.lang.ast.passes -import org.partiql.lang.ast.* +import org.partiql.lang.ast.DataManipulationOperation +import org.partiql.lang.ast.DataType +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.FromSource +import org.partiql.lang.ast.OnConflict +import org.partiql.lang.ast.PathComponent +import org.partiql.lang.ast.SelectListItem +import org.partiql.lang.ast.SelectProjection /** * Used in conjunction with [AstWalker], implementors of this interface can easily inspect an AST. diff --git a/lang/src/org/partiql/lang/ast/passes/AstWalker.kt b/lang/src/org/partiql/lang/ast/passes/AstWalker.kt index e17ffdc85a..c067b2703d 100644 --- a/lang/src/org/partiql/lang/ast/passes/AstWalker.kt +++ b/lang/src/org/partiql/lang/ast/passes/AstWalker.kt @@ -14,8 +14,55 @@ package org.partiql.lang.ast.passes -import org.partiql.lang.ast.* -import org.partiql.lang.util.* +import org.partiql.lang.ast.AssignmentOp +import org.partiql.lang.ast.CallAgg +import org.partiql.lang.ast.ConflictAction +import org.partiql.lang.ast.CreateIndex +import org.partiql.lang.ast.CreateTable +import org.partiql.lang.ast.DataManipulation +import org.partiql.lang.ast.DataManipulationOperation +import org.partiql.lang.ast.DateTimeType +import org.partiql.lang.ast.DeleteOp +import org.partiql.lang.ast.DmlOpList +import org.partiql.lang.ast.DropIndex +import org.partiql.lang.ast.DropTable +import org.partiql.lang.ast.Exec +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.FromSource +import org.partiql.lang.ast.FromSourceExpr +import org.partiql.lang.ast.FromSourceJoin +import org.partiql.lang.ast.FromSourceUnpivot +import org.partiql.lang.ast.InsertOp +import org.partiql.lang.ast.InsertValueOp +import org.partiql.lang.ast.Literal +import org.partiql.lang.ast.LiteralMissing +import org.partiql.lang.ast.MetaContainer +import org.partiql.lang.ast.NAry +import org.partiql.lang.ast.OnConflict +import org.partiql.lang.ast.Parameter +import org.partiql.lang.ast.Path +import org.partiql.lang.ast.PathComponentExpr +import org.partiql.lang.ast.PathComponentUnpivot +import org.partiql.lang.ast.PathComponentWildcard +import org.partiql.lang.ast.RemoveOp +import org.partiql.lang.ast.ReturningColumn +import org.partiql.lang.ast.ReturningWildcard +import org.partiql.lang.ast.SearchedCase +import org.partiql.lang.ast.Select +import org.partiql.lang.ast.SelectListItemExpr +import org.partiql.lang.ast.SelectListItemProjectAll +import org.partiql.lang.ast.SelectListItemStar +import org.partiql.lang.ast.SelectProjection +import org.partiql.lang.ast.SelectProjectionList +import org.partiql.lang.ast.SelectProjectionPivot +import 
org.partiql.lang.ast.SelectProjectionValue +import org.partiql.lang.ast.Seq +import org.partiql.lang.ast.SimpleCase +import org.partiql.lang.ast.Struct +import org.partiql.lang.ast.Typed +import org.partiql.lang.ast.VariableReference +import org.partiql.lang.util.case +import org.partiql.lang.util.checkThreadInterrupted /** * Contains the logic necessary to walk every node in the AST and invokes methods of [AstVisitor] along the way. @@ -35,29 +82,29 @@ open class AstWalker(private val visitor: AstVisitor) { is Literal, is LiteralMissing, is VariableReference, - is Parameter -> case { + is Parameter -> case { // Leaf nodes have no children to walk. } - is NAry -> case { + is NAry -> case { val (_, args, _: MetaContainer) = expr args.forEach { it -> walkExprNode(it) } } - is CallAgg -> case { + is CallAgg -> case { val (funcExpr, _, arg, _: MetaContainer) = expr walkExprNode(funcExpr) walkExprNode(arg) } - is Typed -> case { + is Typed -> case { val (_, exp, sqlDataType, _: MetaContainer) = expr walkExprNode(exp) visitor.visitDataType(sqlDataType) } - is Path -> case { + is Path -> case { walkPath(expr) } - is SimpleCase -> case { + is SimpleCase -> case { val (valueExpr, branches, elseExpr, _: MetaContainer) = expr walkExprNode(valueExpr) branches.forEach { @@ -74,20 +121,20 @@ open class AstWalker(private val visitor: AstVisitor) { } walkExprNode(elseExpr) } - is Struct -> case { + is Struct -> case { val (fields, _: MetaContainer) = expr fields.forEach { val (nameExpr, valueExpr) = it walkExprNode(nameExpr, valueExpr) } } - is Seq -> case { + is Seq -> case { val (_, items, _: MetaContainer) = expr items.forEach { walkExprNode(it) } } - is Select -> case { + is Select -> case { val (_, projection, from, fromLet, where, groupBy, having, orderBy, limit, offset, _: MetaContainer) = expr walkSelectProjection(projection) walkFromSource(from) @@ -119,7 +166,7 @@ open class AstWalker(private val visitor: AstVisitor) { returning?.let { it.returningElems.forEach { re -> when (re.columnComponent) { - is ReturningColumn -> case { + is ReturningColumn -> case { walkExprNode(re.columnComponent.column) } is ReturningWildcard -> case { @@ -151,7 +198,7 @@ open class AstWalker(private val visitor: AstVisitor) { is PathComponentWildcard -> case { //Leaf nodes have no children to walk. } - is PathComponentExpr -> case { + is PathComponentExpr -> case { val (exp) = it walkExprNode(exp) } @@ -162,7 +209,7 @@ open class AstWalker(private val visitor: AstVisitor) { private fun walkFromSource(fromSource: FromSource) { visitor.visitFromSource(fromSource) when (fromSource) { - is FromSourceExpr -> case { + is FromSourceExpr -> case { val (exp, _) = fromSource walkExprNode(exp) } @@ -170,7 +217,7 @@ open class AstWalker(private val visitor: AstVisitor) { val (exp, _, _) = fromSource walkExprNode(exp) } - is FromSourceJoin -> case { + is FromSourceJoin -> case { val (_, leftRef, rightRef, condition, _: MetaContainer) = fromSource walkFromSource(leftRef) walkFromSource(rightRef) @@ -190,15 +237,15 @@ open class AstWalker(private val visitor: AstVisitor) { val (asExpr, atExpr) = projection walkExprNode(asExpr, atExpr) } - is SelectProjectionList -> case { + is SelectProjectionList -> case { val (items) = projection items.forEach { visitor.visitSelectListItem(it) when (it) { - is SelectListItemStar -> case { + is SelectListItemStar -> case { //Leaf nodes have no children to walk. 
} - is SelectListItemExpr -> case { + is SelectListItemExpr -> case { walkExprNode(it.expr) } is SelectListItemProjectAll -> case { diff --git a/lang/src/org/partiql/lang/ast/passes/SemanticException.kt b/lang/src/org/partiql/lang/ast/passes/SemanticException.kt index 4e62d0028d..fa22ae1040 100644 --- a/lang/src/org/partiql/lang/ast/passes/SemanticException.kt +++ b/lang/src/org/partiql/lang/ast/passes/SemanticException.kt @@ -14,17 +14,18 @@ package org.partiql.lang.ast.passes -import org.partiql.lang.* -import org.partiql.lang.errors.* +import org.partiql.lang.SqlException +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.PropertyValueMap /** * The exception to be thrown by semantic passes. */ class SemanticException( - message: String = "", - errorCode: ErrorCode, - errorContext: PropertyValueMap?, - cause: Throwable? = null) + message: String = "", + errorCode: ErrorCode, + errorContext: PropertyValueMap?, + cause: Throwable? = null) : SqlException(message, errorCode, errorContext, cause) diff --git a/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt b/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt index c84d57eff7..b7f850a334 100644 --- a/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt +++ b/lang/src/org/partiql/lang/errors/ErrorAndErrorContexts.kt @@ -15,9 +15,8 @@ package org.partiql.lang.errors import com.amazon.ion.IonValue -import org.partiql.lang.errors.PropertyType.* -import org.partiql.lang.syntax.* -import java.util.* +import org.partiql.lang.syntax.TokenType +import java.util.EnumMap internal const val UNKNOWN: String = "" @@ -47,36 +46,36 @@ enum class ErrorCategory(val message: String) { * */ enum class Property(val propertyName: String, val propertyType: PropertyType) { - LINE_NUMBER("line_no", LONG_CLASS), - COLUMN_NUMBER("column_no", LONG_CLASS), - TOKEN_STRING("token_string", STRING_CLASS), - CAST_TO("cast_to", STRING_CLASS), - CAST_FROM("cast_from", STRING_CLASS), - KEYWORD("keyword", STRING_CLASS), - TOKEN_TYPE("token_type", TOKEN_CLASS), - EXPECTED_TOKEN_TYPE("expected_token_type", TOKEN_CLASS), - EXPECTED_TOKEN_TYPE_1_OF_2("expected_token_type_1_of_2", TOKEN_CLASS), - EXPECTED_TOKEN_TYPE_2_OF_2("expected_token_type_2_of_2", TOKEN_CLASS), - TOKEN_VALUE("token_value", ION_VALUE_CLASS), - EXPECTED_ARITY_MIN("arity_min", INTEGER_CLASS), - EXPECTED_ARITY_MAX("arity_max", INTEGER_CLASS), - ACTUAL_ARITY("actual_arity", INTEGER_CLASS), - EXPECTED_PARAMETER_ORDINAL("expected_parameter_ordinal", INTEGER_CLASS), - BOUND_PARAMETER_COUNT("bound_parameter_count", INTEGER_CLASS), - TIMESTAMP_FORMAT_PATTERN("timestamp_format_pattern", STRING_CLASS), - TIMESTAMP_FORMAT_PATTERN_FIELDS("timestamp_format_pattern_fields", STRING_CLASS), - TIMESTAMP_STRING("timestamp_string", STRING_CLASS), - BINDING_NAME("binding_name", STRING_CLASS), - BINDING_NAME_MATCHES("binding_name_matches", STRING_CLASS), - LIKE_VALUE("value_to_match", STRING_CLASS), - LIKE_PATTERN("pattern", STRING_CLASS), - LIKE_ESCAPE("escape_char", STRING_CLASS), - FUNCTION_NAME("function_name", STRING_CLASS), - PROCEDURE_NAME("procedure_name", STRING_CLASS), - EXPECTED_ARGUMENT_TYPES("expected_types", STRING_CLASS), - ACTUAL_ARGUMENT_TYPES("actual_types", STRING_CLASS), - FEATURE_NAME("FEATURE_NAME", STRING_CLASS), - ACTUAL_TYPE("ACTUAL_TYPE", STRING_CLASS) + LINE_NUMBER("line_no", PropertyType.LONG_CLASS), + COLUMN_NUMBER("column_no", PropertyType.LONG_CLASS), + TOKEN_STRING("token_string", PropertyType.STRING_CLASS), + CAST_TO("cast_to", PropertyType.STRING_CLASS), + 
CAST_FROM("cast_from", PropertyType.STRING_CLASS), + KEYWORD("keyword", PropertyType.STRING_CLASS), + TOKEN_TYPE("token_type", PropertyType.TOKEN_CLASS), + EXPECTED_TOKEN_TYPE("expected_token_type", PropertyType.TOKEN_CLASS), + EXPECTED_TOKEN_TYPE_1_OF_2("expected_token_type_1_of_2", PropertyType.TOKEN_CLASS), + EXPECTED_TOKEN_TYPE_2_OF_2("expected_token_type_2_of_2", PropertyType.TOKEN_CLASS), + TOKEN_VALUE("token_value", PropertyType.ION_VALUE_CLASS), + EXPECTED_ARITY_MIN("arity_min", PropertyType.INTEGER_CLASS), + EXPECTED_ARITY_MAX("arity_max", PropertyType.INTEGER_CLASS), + ACTUAL_ARITY("actual_arity", PropertyType.INTEGER_CLASS), + EXPECTED_PARAMETER_ORDINAL("expected_parameter_ordinal", PropertyType.INTEGER_CLASS), + BOUND_PARAMETER_COUNT("bound_parameter_count", PropertyType.INTEGER_CLASS), + TIMESTAMP_FORMAT_PATTERN("timestamp_format_pattern", PropertyType.STRING_CLASS), + TIMESTAMP_FORMAT_PATTERN_FIELDS("timestamp_format_pattern_fields", PropertyType.STRING_CLASS), + TIMESTAMP_STRING("timestamp_string", PropertyType.STRING_CLASS), + BINDING_NAME("binding_name", PropertyType.STRING_CLASS), + BINDING_NAME_MATCHES("binding_name_matches", PropertyType.STRING_CLASS), + LIKE_VALUE("value_to_match", PropertyType.STRING_CLASS), + LIKE_PATTERN("pattern", PropertyType.STRING_CLASS), + LIKE_ESCAPE("escape_char", PropertyType.STRING_CLASS), + FUNCTION_NAME("function_name", PropertyType.STRING_CLASS), + PROCEDURE_NAME("procedure_name", PropertyType.STRING_CLASS), + EXPECTED_ARGUMENT_TYPES("expected_types", PropertyType.STRING_CLASS), + ACTUAL_ARGUMENT_TYPES("actual_types", PropertyType.STRING_CLASS), + FEATURE_NAME("FEATURE_NAME", PropertyType.STRING_CLASS), + ACTUAL_TYPE("ACTUAL_TYPE", PropertyType.STRING_CLASS) } /** @@ -93,16 +92,16 @@ abstract class PropertyValue(val type: PropertyType) { val value: Any get() = when (type) { - LONG_CLASS -> longValue() - STRING_CLASS -> stringValue() - INTEGER_CLASS -> integerValue() - TOKEN_CLASS -> tokenTypeValue() - ION_VALUE_CLASS -> ionValue() + PropertyType.LONG_CLASS -> longValue() + PropertyType.STRING_CLASS -> stringValue() + PropertyType.INTEGER_CLASS -> integerValue() + PropertyType.TOKEN_CLASS -> tokenTypeValue() + PropertyType.ION_VALUE_CLASS -> ionValue() } override fun toString(): String = when (type) { - ION_VALUE_CLASS -> (value as IonValue).toPrettyString() + PropertyType.ION_VALUE_CLASS -> (value as IonValue).toPrettyString() else -> value.toString() } } @@ -168,10 +167,10 @@ class PropertyValueMap(private val map: EnumMap = EnumM * @throws [IllegalArgumentException] if the [Property] used as `key` requires values of type **other than** [String] */ operator fun set(key: Property, strValue: String) { - val o = object : PropertyValue(STRING_CLASS) { + val o = object : PropertyValue(PropertyType.STRING_CLASS) { override fun stringValue(): String = strValue } - verifyTypeAndSet(key, STRING_CLASS, strValue ,o) + verifyTypeAndSet(key, PropertyType.STRING_CLASS, strValue ,o) } @@ -184,10 +183,10 @@ class PropertyValueMap(private val map: EnumMap = EnumM * @throws [IllegalArgumentException] if the [Property] used as `key` requires values of type **other than** [Long] */ operator fun set(key: Property, longValue: Long) { - val o = object : PropertyValue(LONG_CLASS) { + val o = object : PropertyValue(PropertyType.LONG_CLASS) { override fun longValue(): Long = longValue } - verifyTypeAndSet(key, LONG_CLASS, longValue, o) + verifyTypeAndSet(key, PropertyType.LONG_CLASS, longValue, o) } @@ -200,10 +199,10 @@ class PropertyValueMap(private val map: 
EnumMap = EnumM * @throws [IllegalArgumentException] if the [Property] used as `key` requires values of type **other than** [Int] */ operator fun set(key: Property, intValue: Int) { - val o = object : PropertyValue(INTEGER_CLASS) { + val o = object : PropertyValue(PropertyType.INTEGER_CLASS) { override fun integerValue(): Int = intValue } - verifyTypeAndSet(key, INTEGER_CLASS, intValue, o) + verifyTypeAndSet(key, PropertyType.INTEGER_CLASS, intValue, o) } @@ -216,10 +215,10 @@ class PropertyValueMap(private val map: EnumMap = EnumM * @throws [IllegalArgumentException] if the [Property] used as `key` requires values of type **other than** [IonValue] */ operator fun set(key: Property, ionValue: IonValue) { - val o = object : PropertyValue(ION_VALUE_CLASS) { + val o = object : PropertyValue(PropertyType.ION_VALUE_CLASS) { override fun ionValue(): IonValue = ionValue } - verifyTypeAndSet(key, ION_VALUE_CLASS, ionValue, o) + verifyTypeAndSet(key, PropertyType.ION_VALUE_CLASS, ionValue, o) } @@ -232,10 +231,10 @@ class PropertyValueMap(private val map: EnumMap = EnumM * @throws [IllegalArgumentException] if the [Property] used as `key` requires values of type **other than** [TokenType] */ operator fun set(key: Property, tokenTypeValue: TokenType) { - val o = object : PropertyValue(TOKEN_CLASS) { + val o = object : PropertyValue(PropertyType.TOKEN_CLASS) { override fun tokenTypeValue(): TokenType = tokenTypeValue } - verifyTypeAndSet(key, TOKEN_CLASS, tokenTypeValue, o) + verifyTypeAndSet(key, PropertyType.TOKEN_CLASS, tokenTypeValue, o) } diff --git a/lang/src/org/partiql/lang/eval/Bindings.kt b/lang/src/org/partiql/lang/eval/Bindings.kt index dda4a077ec..0cb955edea 100644 --- a/lang/src/org/partiql/lang/eval/Bindings.kt +++ b/lang/src/org/partiql/lang/eval/Bindings.kt @@ -13,9 +13,13 @@ */ package org.partiql.lang.eval -import com.amazon.ion.* +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSystem +import com.amazon.ion.IonValue import org.partiql.lang.ast.CaseSensitivity -import org.partiql.lang.util.* +import org.partiql.lang.util.errAmbiguousBinding +import org.partiql.lang.util.isBindingNameEquivalent +import org.partiql.lang.util.stringValue /** Indicates if the lookup of a particular binding should be case-sensitive or not. */ enum class BindingCase { diff --git a/lang/src/org/partiql/lang/eval/Environment.kt b/lang/src/org/partiql/lang/eval/Environment.kt index 25b3373657..7a8e7d9a45 100644 --- a/lang/src/org/partiql/lang/eval/Environment.kt +++ b/lang/src/org/partiql/lang/eval/Environment.kt @@ -14,7 +14,7 @@ package org.partiql.lang.eval -import java.util.* +import java.util.TreeMap /** * The environment for execution. 
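Note on the PropertyValueMap hunks above: they only qualify the PropertyType constants; the typed `set` overloads themselves are unchanged. A minimal usage sketch (not code from this patch; the names are exactly the ones shown in the hunks):

    import org.partiql.lang.errors.Property
    import org.partiql.lang.errors.PropertyValueMap

    fun exampleErrorContext(): PropertyValueMap {
        val errorContext = PropertyValueMap()
        errorContext[Property.LINE_NUMBER] = 1L          // LONG_CLASS entry: accepts a Long
        errorContext[Property.TOKEN_STRING] = "SELECT"   // STRING_CLASS entry: accepts a String
        // errorContext[Property.LINE_NUMBER] = "1"      // mismatched type: verifyTypeAndSet throws IllegalArgumentException
        return errorContext
    }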
diff --git a/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt b/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt index 91824207ea..701846b084 100644 --- a/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt +++ b/lang/src/org/partiql/lang/eval/EvaluatingCompiler.kt @@ -14,12 +14,77 @@ package org.partiql.lang.eval - -import com.amazon.ion.* -import org.partiql.lang.ast.* +import com.amazon.ion.IntegerSize +import com.amazon.ion.IonInt +import com.amazon.ion.IonSexp +import com.amazon.ion.IonString +import com.amazon.ion.IonValue +import org.partiql.lang.ast.AggregateCallSiteListMeta +import org.partiql.lang.ast.AggregateRegisterIdMeta +import org.partiql.lang.ast.AstDeserializerBuilder +import org.partiql.lang.ast.AstVersion +import org.partiql.lang.ast.CallAgg +import org.partiql.lang.ast.CreateIndex +import org.partiql.lang.ast.CreateTable +import org.partiql.lang.ast.DataManipulation +import org.partiql.lang.ast.DateTimeType +import org.partiql.lang.ast.DropIndex +import org.partiql.lang.ast.DropTable +import org.partiql.lang.ast.Exec +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.FromSource +import org.partiql.lang.ast.FromSourceExpr +import org.partiql.lang.ast.FromSourceJoin +import org.partiql.lang.ast.FromSourceLet +import org.partiql.lang.ast.FromSourceUnpivot +import org.partiql.lang.ast.GroupBy +import org.partiql.lang.ast.GroupByItem +import org.partiql.lang.ast.GroupingStrategy +import org.partiql.lang.ast.IsCountStarMeta +import org.partiql.lang.ast.JoinOp +import org.partiql.lang.ast.LetSource +import org.partiql.lang.ast.Literal +import org.partiql.lang.ast.LiteralMissing +import org.partiql.lang.ast.MetaContainer +import org.partiql.lang.ast.NAry +import org.partiql.lang.ast.NAryOp +import org.partiql.lang.ast.Parameter +import org.partiql.lang.ast.Path +import org.partiql.lang.ast.PathComponent +import org.partiql.lang.ast.PathComponentExpr +import org.partiql.lang.ast.PathComponentUnpivot +import org.partiql.lang.ast.PathComponentWildcard +import org.partiql.lang.ast.ScopeQualifier +import org.partiql.lang.ast.SearchedCase +import org.partiql.lang.ast.Select +import org.partiql.lang.ast.SelectListItemExpr +import org.partiql.lang.ast.SelectListItemProjectAll +import org.partiql.lang.ast.SelectListItemStar +import org.partiql.lang.ast.SelectProjectionList +import org.partiql.lang.ast.SelectProjectionPivot +import org.partiql.lang.ast.SelectProjectionValue +import org.partiql.lang.ast.Seq +import org.partiql.lang.ast.SeqType +import org.partiql.lang.ast.SetQuantifier +import org.partiql.lang.ast.SimpleCase +import org.partiql.lang.ast.SourceLocationMeta +import org.partiql.lang.ast.SqlDataType +import org.partiql.lang.ast.Struct +import org.partiql.lang.ast.SymbolicName +import org.partiql.lang.ast.Typed +import org.partiql.lang.ast.TypedOp +import org.partiql.lang.ast.UniqueNameMeta +import org.partiql.lang.ast.VariableReference +import org.partiql.lang.ast.toAstExpr +import org.partiql.lang.ast.toAstStatement +import org.partiql.lang.ast.toExprNode +import org.partiql.lang.ast.toExprNodeSetQuantifier +import org.partiql.lang.ast.toPartiQlMetaContainer import org.partiql.lang.domains.PartiqlAst -import org.partiql.lang.errors.* -import org.partiql.lang.eval.binding.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.eval.binding.Alias +import org.partiql.lang.eval.binding.localsBinder import org.partiql.lang.eval.builtins.storedprocedure.StoredProcedure import 
org.partiql.lang.eval.like.PatternPart import org.partiql.lang.eval.like.executePattern @@ -27,10 +92,28 @@ import org.partiql.lang.eval.like.parsePattern import org.partiql.lang.eval.time.Time import org.partiql.lang.eval.visitors.PartiqlAstSanityValidator import org.partiql.lang.syntax.SqlParser -import org.partiql.lang.util.* -import java.math.* -import java.util.* -import kotlin.collections.* +import org.partiql.lang.util.bigDecimalOf +import org.partiql.lang.util.case +import org.partiql.lang.util.checkThreadInterrupted +import org.partiql.lang.util.codePointSequence +import org.partiql.lang.util.compareTo +import org.partiql.lang.util.div +import org.partiql.lang.util.drop +import org.partiql.lang.util.foldLeftProduct +import org.partiql.lang.util.isZero +import org.partiql.lang.util.minus +import org.partiql.lang.util.plus +import org.partiql.lang.util.rem +import org.partiql.lang.util.stringValue +import org.partiql.lang.util.take +import org.partiql.lang.util.times +import org.partiql.lang.util.totalMinutes +import org.partiql.lang.util.unaryMinus +import java.math.BigDecimal +import java.util.LinkedList +import java.util.Stack +import java.util.TreeSet +import kotlin.collections.List /** * A basic compiler that converts an instance of [ExprNode] to an [Expression]. @@ -139,7 +222,8 @@ internal class EvaluatingCompiler( /** Dispatch table for built-in aggregate functions. */ private val builtinAggregates: Map, ExprAggregatorFactory> = { val countAccFunc: (Number?, ExprValue) -> Number = { curr, _ -> curr!! + 1L } - val sumAccFunc: (Number?, ExprValue) -> Number = { curr, next -> curr?.let { it + next.numberValue() } ?: next.numberValue() } + val sumAccFunc: (Number?, ExprValue) -> Number = + { curr, next -> curr?.let { it + next.numberValue() } ?: next.numberValue() } val minAccFunc = comparisonAccumulator { left, right -> left < right } val maxAccFunc = comparisonAccumulator { left, right -> left > right } @@ -214,7 +298,8 @@ internal class EvaluatingCompiler( */ fun compile(originalAst: ExprNode): Expression { val visitorTransformer = compileOptions.visitorTransformMode.createVisitorTransform() - val transformedAst = visitorTransformer.transformStatement(originalAst.toAstStatement()).toExprNode(valueFactory.ion) + val transformedAst = + visitorTransformer.transformStatement(originalAst.toAstStatement()).toExprNode(valueFactory.ion) PartiqlAstSanityValidator.validate(transformedAst.toAstStatement()) @@ -270,20 +355,20 @@ internal class EvaluatingCompiler( private fun compileExprNode(expr: ExprNode): ThunkEnv { checkThreadInterrupted() return when (expr) { - is Literal -> compileLiteral(expr) - is LiteralMissing -> compileLiteralMissing(expr) + is Literal -> compileLiteral(expr) + is LiteralMissing -> compileLiteralMissing(expr) is VariableReference -> compileVariableReference(expr) - is NAry -> compileNAry(expr) - is Typed -> compileTyped(expr) - is SimpleCase -> compileSimpleCase(expr) - is SearchedCase -> compileSearchedCase(expr) - is Path -> compilePath(expr) - is Struct -> compileStruct(expr) - is Seq -> compileSeq(expr) - is Select -> compileSelect(expr) - is CallAgg -> compileCallAgg(expr) - is Parameter -> compileParameter(expr) - is DataManipulation -> err( + is NAry -> compileNAry(expr) + is Typed -> compileTyped(expr) + is SimpleCase -> compileSimpleCase(expr) + is SearchedCase -> compileSearchedCase(expr) + is Path -> compilePath(expr) + is Struct -> compileStruct(expr) + is Seq -> compileSeq(expr) + is Select -> compileSelect(expr) + is CallAgg -> 
compileCallAgg(expr) + is Parameter -> compileParameter(expr) + is DataManipulation -> err( "DML operations are not supported yet", ErrorCode.EVALUATOR_FEATURE_NOT_SUPPORTED_YET, errorContextFrom(expr.metas).also { @@ -294,7 +379,7 @@ internal class EvaluatingCompiler( is CreateIndex, is DropIndex, is DropTable -> compileDdl(expr) - is Exec -> compileExec(expr) + is Exec -> compileExec(expr) is DateTimeType.Date -> compileDate(expr) is DateTimeType.Time -> compileTime(expr) } @@ -1090,7 +1175,8 @@ internal class EvaluatingCompiler( val compiledAggregates = aggregateListMeta?.aggregateCallSites?.map { it -> val funcName = it.funcName.text CompiledAggregate( - factory = getAggregatorFactory(funcName, it.setq.toExprNodeSetQuantifier(), it.metas.toPartiQlMetaContainer()), + factory = getAggregatorFactory(funcName, it.setq.toExprNodeSetQuantifier(), + it.metas.toPartiQlMetaContainer()), argThunk = compileExprNode(it.arg.toExprNode(valueFactory.ion))) } @@ -1236,7 +1322,7 @@ internal class EvaluatingCompiler( } } } - is SelectProjectionList -> { + is SelectProjectionList -> { val (items) = projection nestCompilationContext(ExpressionContext.SELECT_LIST, allFromSourceAliases) { val projectionThunk: ThunkEnvValue> = @@ -1378,7 +1464,8 @@ internal class EvaluatingCompiler( private fun compileCallAgg(expr: CallAgg): ThunkEnv { val (funcExpr, setQuantifier, argExpr, metas: MetaContainer) = expr - if(metas.hasMeta(IsCountStarMeta.TAG) && currentCompilationContext.expressionContext != ExpressionContext.SELECT_LIST) { + if(metas.hasMeta(IsCountStarMeta.TAG) && + currentCompilationContext.expressionContext != ExpressionContext.SELECT_LIST) { err("COUNT(*) is not allowed in this context", errorContextFrom(metas), internal = false) } @@ -1428,10 +1515,10 @@ internal class EvaluatingCompiler( } private fun compileFromSources( - fromSource: FromSource, - sources: MutableList = ArrayList(), - joinExpansion: JoinExpansion = JoinExpansion.INNER, - conditionThunk: ThunkEnv? = null + fromSource: FromSource, + sources: MutableList = ArrayList(), + joinExpansion: JoinExpansion = JoinExpansion.INNER, + conditionThunk: ThunkEnv? = null ): List { val metas = fromSource.metas() @@ -1459,7 +1546,7 @@ internal class EvaluatingCompiler( joinExpansion = joinExpansion, filter = conditionThunk)) } - is FromSourceJoin -> case { + is FromSourceJoin -> case { val (joinOp, left, right, condition, _: MetaContainer) = fromSource @@ -1623,7 +1710,7 @@ internal class EvaluatingCompiler( errNoContext("Encountered a SelectListItemStar--did SelectStarVisitorTransform execute?", internal = true) } - is SelectListItemExpr -> { + is SelectListItemExpr -> { val (itemExpr, asName) = it val alias = asName?.name ?: itemExpr.extractColumnAlias(idx) val thunk = compileExprNode(itemExpr) @@ -1661,7 +1748,7 @@ internal class EvaluatingCompiler( componentThunks.add( when (c) { - is PathComponentExpr -> { + is PathComponentExpr -> { val (indexExpr, caseSensitivity) = c val locationMeta = indexExpr.metas.sourceLocationMeta when { @@ -1695,7 +1782,7 @@ internal class EvaluatingCompiler( } } } - is PathComponentUnpivot -> { + is PathComponentUnpivot -> { val (pathComponentMetas: MetaContainer) = c when { !remainingComponents.isEmpty() -> { @@ -1909,10 +1996,10 @@ internal class EvaluatingCompiler( * and the size of the search pattern excluding uses of the escape character */ private fun checkPattern( - pattern: IonValue, - patternLocationMeta: SourceLocationMeta?, - escape: IonValue?, - escapeLocationMeta: SourceLocationMeta? 
+ pattern: IonValue, + patternLocationMeta: SourceLocationMeta?, + escape: IonValue?, + escapeLocationMeta: SourceLocationMeta? ): Pair { val patternString = pattern.stringValue() diff --git a/lang/src/org/partiql/lang/eval/EvaluationSession.kt b/lang/src/org/partiql/lang/eval/EvaluationSession.kt index dcb0015b4b..b16d286560 100644 --- a/lang/src/org/partiql/lang/eval/EvaluationSession.kt +++ b/lang/src/org/partiql/lang/eval/EvaluationSession.kt @@ -14,7 +14,7 @@ package org.partiql.lang.eval -import com.amazon.ion.* +import com.amazon.ion.Timestamp import java.time.ZoneOffset /** diff --git a/lang/src/org/partiql/lang/eval/Exceptions.kt b/lang/src/org/partiql/lang/eval/Exceptions.kt index 1e2b5c54b8..3d7826a07f 100644 --- a/lang/src/org/partiql/lang/eval/Exceptions.kt +++ b/lang/src/org/partiql/lang/eval/Exceptions.kt @@ -14,10 +14,12 @@ package org.partiql.lang.eval -import org.partiql.lang.* -import org.partiql.lang.ast.* -import org.partiql.lang.errors.* -import org.partiql.lang.util.* +import org.partiql.lang.SqlException +import org.partiql.lang.ast.MetaContainer +import org.partiql.lang.ast.SourceLocationMeta +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap /** Error for evaluation problems. */ open class EvaluationException(message: String, diff --git a/lang/src/org/partiql/lang/eval/ExprFunction.kt b/lang/src/org/partiql/lang/eval/ExprFunction.kt index 7b99d52dbd..e2107849ec 100644 --- a/lang/src/org/partiql/lang/eval/ExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/ExprFunction.kt @@ -14,9 +14,10 @@ package org.partiql.lang.eval -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.util.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.util.isAnyUnknown /** * Represents a function that can be invoked from within an [EvaluatingCompiler] diff --git a/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt b/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt index fb8ff40a4b..5540b99b68 100644 --- a/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt +++ b/lang/src/org/partiql/lang/eval/ExprNodeExtensions.kt @@ -14,8 +14,13 @@ package org.partiql.lang.eval -import com.amazon.ion.* -import org.partiql.lang.ast.* +import com.amazon.ion.IonString +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.Literal +import org.partiql.lang.ast.MetaContainer +import org.partiql.lang.ast.Path +import org.partiql.lang.ast.PathComponentExpr +import org.partiql.lang.ast.VariableReference /** @@ -30,7 +35,7 @@ fun ExprNode.extractColumnAlias(idx: Int): String = val (name, _, _, _: MetaContainer) = this name } - is Path -> { + is Path -> { this.extractColumnAlias(idx) } else -> syntheticColumnName(idx) @@ -51,7 +56,7 @@ fun Path.extractColumnAlias(idx: Int): String { else -> syntheticColumnName(idx) } } - else -> syntheticColumnName(idx) + else -> syntheticColumnName(idx) } } diff --git a/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt b/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt index 0aae7c4e37..e69e2c387b 100644 --- a/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt +++ b/lang/src/org/partiql/lang/eval/ExprValueExtensions.kt @@ -14,21 +14,33 @@ package org.partiql.lang.eval -import com.amazon.ion.* -import org.partiql.lang.ast.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.ExprValueType.* +import 
com.amazon.ion.IntegerSize +import com.amazon.ion.IonInt +import com.amazon.ion.Timestamp +import org.partiql.lang.ast.DataType +import org.partiql.lang.ast.SourceLocationMeta +import org.partiql.lang.ast.SqlDataType +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap import org.partiql.lang.eval.time.NANOS_PER_SECOND import org.partiql.lang.eval.time.Time -import org.partiql.lang.syntax.* -import org.partiql.lang.util.* -import java.math.* +import org.partiql.lang.syntax.DATE_PART_KEYWORDS +import org.partiql.lang.syntax.DatePart +import org.partiql.lang.util.ConfigurableExprValueFormatter +import org.partiql.lang.util.bigDecimalOf +import org.partiql.lang.util.coerce +import org.partiql.lang.util.compareTo +import org.partiql.lang.util.downcast +import org.partiql.lang.util.getPrecisionFromTimeString +import org.partiql.lang.util.ionValue +import java.math.BigDecimal import java.time.LocalDate import java.time.LocalTime import java.time.ZoneOffset import java.time.format.DateTimeFormatter import java.time.format.DateTimeParseException -import java.util.* +import java.util.TreeSet /** * Wraps the given [ExprValue] with a delegate that provides the [OrderedBindNames] facet. @@ -160,12 +172,13 @@ internal fun ExprValue.isDirectlyComparableTo(other: ExprValue): Boolean = when { // The ExprValue type for TIME and TIME WITH TIME ZONE is same // and thus needs to be checked explicitly for the timezone values. - type == TIME && other.type == TIME -> timeValue().isDirectlyComparableTo(other.timeValue()) + type == ExprValueType.TIME && other.type == ExprValueType.TIME -> + timeValue().isDirectlyComparableTo(other.timeValue()) else -> type.isDirectlyComparableTo(other.type) } /** Types that are cast to the [ExprValueType.isText] types by calling `IonValue.toString()`. */ -private val ION_TEXT_STRING_CAST_TYPES = setOf(BOOL, TIMESTAMP) +private val ION_TEXT_STRING_CAST_TYPES = setOf(ExprValueType.BOOL, ExprValueType.TIMESTAMP) /** Regex to match DATE strings of the format yyyy-MM-dd */ private val datePatternRegex = Regex("\\d\\d\\d\\d-\\d\\d-\\d\\d") @@ -226,10 +239,10 @@ private val genericTimeRegex = Regex("\\d\\d:\\d\\d:\\d\\d(\\.\\d*)?([+|-]\\d\\d * @param session The EvaluationSession which provides necessary information for evaluation. */ fun ExprValue.cast( - targetDataType: DataType, - valueFactory: ExprValueFactory, - locationMeta: SourceLocationMeta?, - session: EvaluationSession + targetDataType: DataType, + valueFactory: ExprValueFactory, + locationMeta: SourceLocationMeta?, + session: EvaluationSession ): ExprValue { val targetSqlDataType = targetDataType.sqlDataType @@ -266,8 +279,8 @@ fun ExprValue.cast( fun Number.exprValue() = valueFactory.newFromIonValue(ionValue(valueFactory.ion)) fun String.exprValue(type: ExprValueType) = valueFactory.newFromIonValue(when (type) { - STRING -> valueFactory.ion.newString(this) - SYMBOL -> valueFactory.ion.newSymbol(this) + ExprValueType.STRING -> valueFactory.ion.newString(this) + ExprValueType.SYMBOL -> valueFactory.ion.newSymbol(this) else -> castFailedErr("Invalid type for textual conversion: $type (this code should be unreachable)", internal = true) }) @@ -277,7 +290,7 @@ fun ExprValue.cast( type.isUnknown && targetSqlDataType == SqlDataType.NULL -> return valueFactory.nullValue // Note that the ExprValueType for TIME and TIME WITH TIME ZONE is the same i.e. ExprValueType.TIME. 
// We further need to check for the time zone and hence we do not short circuit here when the type is TIME. - type.isUnknown || (type == targetExprValueType && type != TIME) -> return this + type.isUnknown || (type == targetExprValueType && type != ExprValueType.TIME) -> return this else -> { when (targetSqlDataType) { SqlDataType.BOOLEAN -> when { @@ -292,7 +305,7 @@ fun ExprValue.cast( } } SqlDataType.SMALLINT, SqlDataType.INTEGER -> when { - type == BOOL -> return valueFactory.newInt(if(booleanValue()) 1L else 0L) + type == ExprValueType.BOOL -> return valueFactory.newInt(if(booleanValue()) 1L else 0L) type.isNumber -> return valueFactory.newInt(numberValue().toLongFailingOverflow(locationMeta)) type.isText -> { val value = try { @@ -309,7 +322,7 @@ fun ExprValue.cast( } } SqlDataType.FLOAT, SqlDataType.REAL, SqlDataType.DOUBLE_PRECISION -> when { - type == BOOL -> return if (booleanValue()) 1.0.exprValue() else 0.0.exprValue() + type == ExprValueType.BOOL -> return if (booleanValue()) 1.0.exprValue() else 0.0.exprValue() type.isNumber -> return numberValue().toDouble().exprValue() type.isText -> try { @@ -319,7 +332,7 @@ fun ExprValue.cast( } } SqlDataType.DECIMAL, SqlDataType.NUMERIC -> when { - type == BOOL -> return if (booleanValue()) BigDecimal.ONE.exprValue() else BigDecimal.ZERO.exprValue() + type == ExprValueType.BOOL -> return if (booleanValue()) BigDecimal.ONE.exprValue() else BigDecimal.ZERO.exprValue() type.isNumber -> return numberValue().coerce(BigDecimal::class.java).exprValue() type.isText -> try { return bigDecimalOf(stringValue()).exprValue() @@ -339,7 +352,7 @@ fun ExprValue.cast( } } SqlDataType.DATE -> when { - type == TIMESTAMP -> { + type == ExprValueType.TIMESTAMP -> { val ts = timestampValue() return valueFactory.newDate(LocalDate.of(ts.year, ts.month, ts.day)) } @@ -361,7 +374,7 @@ fun ExprValue.cast( SqlDataType.TIME, SqlDataType.TIME_WITH_TIME_ZONE -> { val precision = targetDataType.args.firstOrNull()?.toInt() when { - type == TIME -> { + type == ExprValueType.TIME -> { val time = timeValue() val timeZoneOffset = when (targetSqlDataType) { SqlDataType.TIME_WITH_TIME_ZONE -> time.zoneOffset?: session.defaultTimezoneOffset @@ -374,7 +387,7 @@ fun ExprValue.cast( timeZoneOffset )) } - type == TIMESTAMP -> { + type == ExprValueType.TIMESTAMP -> { val ts = timestampValue() val timeZoneOffset = when (targetSqlDataType) { SqlDataType.TIME_WITH_TIME_ZONE -> ts.localOffset?: castFailedErr( @@ -429,8 +442,8 @@ fun ExprValue.cast( SqlDataType.CHARACTER, SqlDataType.CHARACTER_VARYING, SqlDataType.STRING, SqlDataType.SYMBOL -> when { type.isNumber -> return numberValue().toString().exprValue(targetExprValueType) type.isText -> return stringValue().exprValue(targetExprValueType) - type == DATE -> return dateValue().toString().exprValue(targetExprValueType) - type == TIME -> return timeValue().toString().exprValue(targetExprValueType) + type == ExprValueType.DATE -> return dateValue().toString().exprValue(targetExprValueType) + type == ExprValueType.TIME -> return timeValue().toString().exprValue(targetExprValueType) type in ION_TEXT_STRING_CAST_TYPES -> return ionValue.toString().exprValue(targetExprValueType) } SqlDataType.CLOB -> when { diff --git a/lang/src/org/partiql/lang/eval/ExprValueFactory.kt b/lang/src/org/partiql/lang/eval/ExprValueFactory.kt index 4c75032187..335e61e6f3 100644 --- a/lang/src/org/partiql/lang/eval/ExprValueFactory.kt +++ b/lang/src/org/partiql/lang/eval/ExprValueFactory.kt @@ -14,11 +14,28 @@ package org.partiql.lang.eval -import 
com.amazon.ion.* +import com.amazon.ion.IonBool +import com.amazon.ion.IonContainer +import com.amazon.ion.IonNull +import com.amazon.ion.IonReader +import com.amazon.ion.IonSequence +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSystem +import com.amazon.ion.IonType +import com.amazon.ion.IonValue +import com.amazon.ion.Timestamp import org.partiql.lang.errors.ErrorCode import org.partiql.lang.eval.time.Time -import org.partiql.lang.util.* -import java.math.* +import org.partiql.lang.util.booleanValueOrNull +import org.partiql.lang.util.bytesValueOrNull +import org.partiql.lang.util.numberValueOrNull +import org.partiql.lang.util.ordinal +import org.partiql.lang.util.propertyValueMapOf +import org.partiql.lang.util.seal +import org.partiql.lang.util.stringValueOrNull +import org.partiql.lang.util.timestampValueOrNull +import java.math.BigDecimal +import java.math.BigInteger import java.time.LocalDate /** diff --git a/lang/src/org/partiql/lang/eval/ExprValueType.kt b/lang/src/org/partiql/lang/eval/ExprValueType.kt index c324c07476..a9f29f6eb8 100644 --- a/lang/src/org/partiql/lang/eval/ExprValueType.kt +++ b/lang/src/org/partiql/lang/eval/ExprValueType.kt @@ -15,7 +15,7 @@ package org.partiql.lang.eval import com.amazon.ion.IonType -import org.partiql.lang.ast.* +import org.partiql.lang.ast.SqlDataType import org.partiql.lang.syntax.TYPE_ALIASES import org.partiql.lang.syntax.TYPE_NAME_ARITY_MAP diff --git a/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt b/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt index 7ab677dcb5..95455ce45a 100644 --- a/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt +++ b/lang/src/org/partiql/lang/eval/GroupKeyExprValue.kt @@ -14,7 +14,7 @@ package org.partiql.lang.eval -import com.amazon.ion.* +import com.amazon.ion.IonSystem import org.partiql.lang.eval.visitors.GroupByItemAliasVisitorTransform diff --git a/lang/src/org/partiql/lang/eval/IonStructBindings.kt b/lang/src/org/partiql/lang/eval/IonStructBindings.kt index 417a63c25c..fc59271d2e 100644 --- a/lang/src/org/partiql/lang/eval/IonStructBindings.kt +++ b/lang/src/org/partiql/lang/eval/IonStructBindings.kt @@ -14,8 +14,9 @@ package org.partiql.lang.eval -import com.amazon.ion.* -import org.partiql.lang.util.* +import com.amazon.ion.IonStruct +import com.amazon.ion.IonValue +import org.partiql.lang.util.errAmbiguousBinding /** * Custom implementation of [Bindings] that lazily computes case sensitive or insensitive hash tables which diff --git a/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt b/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt index 541fc84fa5..4b3ca6d6aa 100644 --- a/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt +++ b/lang/src/org/partiql/lang/eval/NaturalExprValueComparators.kt @@ -14,8 +14,11 @@ package org.partiql.lang.eval -import org.partiql.lang.eval.ExprValueType.* -import org.partiql.lang.util.* +import org.partiql.lang.util.compareTo +import org.partiql.lang.util.isNaN +import org.partiql.lang.util.isNegInf +import org.partiql.lang.util.isPosInf +import org.partiql.lang.util.isZero /** * Provides a total, natural ordering over [ExprValue]. 
This ordering is consistent with @@ -158,7 +161,7 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa // Bool ifCompared( - handle(lType == BOOL, rType == BOOL) { + handle(lType == ExprValueType.BOOL, rType == ExprValueType.BOOL) { val lVal = left.booleanValue() val rVal = right.booleanValue() @@ -192,7 +195,7 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa // Date ifCompared( - handle(lType == DATE, rType == DATE) { + handle(lType == ExprValueType.DATE, rType == ExprValueType.DATE) { val lVal = left.dateValue() val rVal = right.dateValue() @@ -202,7 +205,7 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa // Time ifCompared( - handle(lType == TIME, rType == TIME) { + handle(lType == ExprValueType.TIME, rType == ExprValueType.TIME) { val lVal = left.timeValue() val rVal = right.timeValue() @@ -212,7 +215,7 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa // Timestamp ifCompared( - handle(lType == TIMESTAMP, rType == TIMESTAMP) { + handle(lType == ExprValueType.TIMESTAMP, rType == ExprValueType.TIMESTAMP) { val lVal = left.timestampValue() val rVal = right.timestampValue() @@ -251,28 +254,28 @@ enum class NaturalExprValueComparators(private val nullOrder: NullOrder) : Compa // List ifCompared( - handle(lType == LIST, rType == LIST) { + handle(lType == ExprValueType.LIST, rType == ExprValueType.LIST) { return compareOrdered(left, right, this) } ) { return it } // Sexp ifCompared( - handle(lType == SEXP, rType == SEXP) { + handle(lType == ExprValueType.SEXP, rType == ExprValueType.SEXP) { return compareOrdered(left, right, this) } ) { return it } // Struct ifCompared( - handle(lType == STRUCT, rType == STRUCT) { + handle(lType == ExprValueType.STRUCT, rType == ExprValueType.STRUCT) { compareUnordered(left, right, structFieldComparator) } ) { return it } // Bag ifCompared( - handle(lType == BAG, rType == BAG) { + handle(lType == ExprValueType.BAG, rType == ExprValueType.BAG) { compareUnordered(left, right, this) } ) { return it } diff --git a/lang/src/org/partiql/lang/eval/NodeMetadata.kt b/lang/src/org/partiql/lang/eval/NodeMetadata.kt index e557f1faf6..5d2a9182f3 100644 --- a/lang/src/org/partiql/lang/eval/NodeMetadata.kt +++ b/lang/src/org/partiql/lang/eval/NodeMetadata.kt @@ -14,9 +14,10 @@ package org.partiql.lang.eval -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.util.* +import com.amazon.ion.IonStruct +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.util.longValue /* * WARNING: This whole file is intended as a non intrusive way to preserve the meta nodes information during diff --git a/lang/src/org/partiql/lang/eval/OrdinalBindings.kt b/lang/src/org/partiql/lang/eval/OrdinalBindings.kt index 8f412c30ab..6818afd584 100644 --- a/lang/src/org/partiql/lang/eval/OrdinalBindings.kt +++ b/lang/src/org/partiql/lang/eval/OrdinalBindings.kt @@ -14,7 +14,7 @@ package org.partiql.lang.eval -import com.amazon.ion.* +import com.amazon.ion.IonSequence /** A simple mapping of ordinal index to [ExprValue]. 
*/ interface OrdinalBindings { diff --git a/lang/src/org/partiql/lang/eval/StructExprValue.kt b/lang/src/org/partiql/lang/eval/StructExprValue.kt index 06539b38c7..dd8585f8d3 100644 --- a/lang/src/org/partiql/lang/eval/StructExprValue.kt +++ b/lang/src/org/partiql/lang/eval/StructExprValue.kt @@ -14,10 +14,9 @@ package org.partiql.lang.eval -import com.amazon.ion.* -import org.partiql.lang.util.* -import java.util.* - +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSystem +import org.partiql.lang.util.seal /** Indicates if a struct is ordered or not. */ enum class StructOrdering { @@ -29,9 +28,9 @@ enum class StructOrdering { * Provides a [ExprValueType.STRUCT] implementation lazily backed by a sequence. */ internal open class StructExprValue( - private val ion: IonSystem, - private val ordering: StructOrdering, - private val sequence: Sequence + private val ion: IonSystem, + private val ordering: StructOrdering, + private val sequence: Sequence ) : BaseExprValue() { override val type = ExprValueType.STRUCT diff --git a/lang/src/org/partiql/lang/eval/Thunk.kt b/lang/src/org/partiql/lang/eval/Thunk.kt index f34177f774..6d5edcd625 100644 --- a/lang/src/org/partiql/lang/eval/Thunk.kt +++ b/lang/src/org/partiql/lang/eval/Thunk.kt @@ -14,8 +14,9 @@ package org.partiql.lang.eval -import org.partiql.lang.ast.* -import org.partiql.lang.errors.* +import org.partiql.lang.ast.MetaContainer +import org.partiql.lang.ast.SourceLocationMeta +import org.partiql.lang.errors.Property /** diff --git a/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt b/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt index a9aa9f0bdc..4e05e297f5 100644 --- a/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt +++ b/lang/src/org/partiql/lang/eval/binding/LocalsBinder.kt @@ -14,8 +14,13 @@ package org.partiql.lang.eval.binding -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import org.partiql.lang.eval.BindingCase +import org.partiql.lang.eval.BindingName +import org.partiql.lang.eval.Bindings +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.address +import org.partiql.lang.eval.name +import org.partiql.lang.util.errAmbiguousBinding /** * Creates a list of bindings from a list of locals. 
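The NaturalExprValueComparators hunk a few files above only qualifies ExprValueType in its type-by-type comparison chain; per its KDoc the class is a total, natural ordering over ExprValue. A one-line usage sketch (the NULLS_FIRST entry name is an assumption, since the hunk does not show the enum's entries):

    import org.partiql.lang.eval.ExprValue
    import org.partiql.lang.eval.NaturalExprValueComparators

    // Sort ExprValues with unknowns (NULL/MISSING) ordered first; entry name assumed.
    fun sortNullsFirst(values: List<ExprValue>): List<ExprValue> =
        values.sortedWith(NaturalExprValueComparators.NULLS_FIRST)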
diff --git a/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt b/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt index 855fce9f48..324c33325b 100644 --- a/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt +++ b/lang/src/org/partiql/lang/eval/builtins/BuiltinFunctions.kt @@ -14,7 +14,13 @@ package org.partiql.lang.eval.builtins -import org.partiql.lang.eval.* +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprFunction +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.errNoContext +import org.partiql.lang.eval.stringValue internal fun createBuiltinFunctions(valueFactory: ExprValueFactory) = listOf( diff --git a/lang/src/org/partiql/lang/eval/builtins/CoalesceExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/CoalesceExprFunction.kt index f889b22cd9..68d4a1edf2 100644 --- a/lang/src/org/partiql/lang/eval/builtins/CoalesceExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/CoalesceExprFunction.kt @@ -14,9 +14,15 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.eval.ExprFunction +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType /** * Coalesce built in function. Takes in one ore more expression as arguments and returns the first non unknown value diff --git a/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt index 49513d19b3..35489a7415 100644 --- a/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/DateAddExprFunction.kt @@ -14,29 +14,35 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import com.amazon.ion.Timestamp.* -import org.partiql.lang.eval.* -import org.partiql.lang.syntax.* -import org.partiql.lang.util.* +import com.amazon.ion.Timestamp +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.datePartValue +import org.partiql.lang.eval.errNoContext +import org.partiql.lang.eval.intValue +import org.partiql.lang.eval.timestampValue +import org.partiql.lang.syntax.DatePart internal class DateAddExprFunction(valueFactory: ExprValueFactory) : NullPropagatingExprFunction("date_add", 3, valueFactory) { companion object { - @JvmStatic private val precisionOrder = listOf(Precision.YEAR, - Precision.MONTH, - Precision.DAY, - Precision.MINUTE, - Precision.SECOND) + @JvmStatic private val precisionOrder = listOf(Timestamp.Precision.YEAR, + Timestamp.Precision.MONTH, + Timestamp.Precision.DAY, + Timestamp.Precision.MINUTE, + Timestamp.Precision.SECOND) - @JvmStatic private val datePartToPrecision = mapOf(DatePart.YEAR to Precision.YEAR, - DatePart.MONTH to Precision.MONTH, - DatePart.DAY to Precision.DAY, - DatePart.HOUR to Precision.MINUTE, - DatePart.MINUTE to Precision.MINUTE, - DatePart.SECOND to Precision.SECOND) + @JvmStatic 
private val datePartToPrecision = mapOf(DatePart.YEAR to Timestamp.Precision.YEAR, + DatePart.MONTH to Timestamp.Precision.MONTH, + DatePart.DAY to Timestamp.Precision.DAY, + DatePart.HOUR to Timestamp.Precision.MINUTE, + DatePart.MINUTE to Timestamp.Precision.MINUTE, + DatePart.SECOND to Timestamp.Precision.SECOND) } - private fun Timestamp.hasSufficientPrecisionFor(requiredPrecision: Precision): Boolean { + private fun Timestamp.hasSufficientPrecisionFor(requiredPrecision: Timestamp.Precision): Boolean { val requiredPrecisionPos = precisionOrder.indexOf(requiredPrecision) val precisionPos = precisionOrder.indexOf(precision) @@ -51,17 +57,17 @@ internal class DateAddExprFunction(valueFactory: ExprValueFactory) : NullPropaga } return when (requiredPrecision) { - Precision.YEAR -> Timestamp.forYear(this.year) - Precision.MONTH -> Timestamp.forMonth(this.year, this.month) - Precision.DAY -> Timestamp.forDay(this.year, this.month, this.day) - Precision.SECOND -> Timestamp.forSecond(this.year, + Timestamp.Precision.YEAR -> Timestamp.forYear(this.year) + Timestamp.Precision.MONTH -> Timestamp.forMonth(this.year, this.month) + Timestamp.Precision.DAY -> Timestamp.forDay(this.year, this.month, this.day) + Timestamp.Precision.SECOND -> Timestamp.forSecond(this.year, this.month, this.day, this.hour, this.minute, this.second, this.localOffset) - Precision.MINUTE -> Timestamp.forMinute(this.year, + Timestamp.Precision.MINUTE -> Timestamp.forMinute(this.year, this.month, this.day, this.hour, diff --git a/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt index 2b35060826..567244a5dc 100644 --- a/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/DateDiffExprFunction.kt @@ -14,10 +14,19 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.eval.* -import org.partiql.lang.syntax.* -import java.time.* +import com.amazon.ion.Timestamp +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.datePartValue +import org.partiql.lang.eval.errNoContext +import org.partiql.lang.eval.timestampValue +import org.partiql.lang.syntax.DatePart +import java.time.Duration +import java.time.OffsetDateTime +import java.time.Period +import java.time.ZoneOffset /** * Difference in date parts between two timestamps. If the first timestamp is later than the second the result is negative. 
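The DateAddExprFunction hunk above spells out Timestamp.Precision throughout; the precision-padding logic it touches leans on ion-java's precision-specific Timestamp factories. A small illustration of those factories (a sketch, not code from this patch):

    import com.amazon.ion.Timestamp

    // Each factory yields a Timestamp whose precision matches the fields supplied,
    // which is what hasSufficientPrecisionFor() compares against via precisionOrder.
    val yearOnly = Timestamp.forYear(2021)                      // Precision.YEAR
    val dayOnly  = Timestamp.forDay(2021, 12, 21)               // Precision.DAY
    val toMinute = Timestamp.forMinute(2021, 12, 21, 11, 1, 0)  // Precision.MINUTE, UTC offset 0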
diff --git a/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt index dfe6330b42..29446ad0a1 100644 --- a/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/ExtractExprFunction.kt @@ -14,10 +14,20 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.eval.* +import com.amazon.ion.Timestamp +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.datePartValue +import org.partiql.lang.eval.dateValue +import org.partiql.lang.eval.errNoContext +import org.partiql.lang.eval.isUnknown import org.partiql.lang.eval.time.Time -import org.partiql.lang.syntax.* +import org.partiql.lang.eval.timeValue +import org.partiql.lang.eval.timestampValue +import org.partiql.lang.syntax.DatePart import java.time.LocalDate private const val SECONDS_PER_MINUTE = 60 diff --git a/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt index bf6c517d20..c007c87334 100644 --- a/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/MakeDateExprFunction.kt @@ -2,11 +2,15 @@ package org.partiql.lang.eval.builtins import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property -import org.partiql.lang.errors.PropertyValueMap -import org.partiql.lang.eval.* +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.err +import org.partiql.lang.eval.intValue import org.partiql.lang.util.propertyValueMapOf import java.time.DateTimeException -import java.time.LocalDate /** * Creates a DATE ExprValue from the date fields year, month and day. diff --git a/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt index 675e8a5933..737e063fb7 100644 --- a/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/MakeTimeExprFunction.kt @@ -2,14 +2,19 @@ package org.partiql.lang.eval.builtins import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property -import org.partiql.lang.eval.* -import org.partiql.lang.eval.time.* -import org.partiql.lang.util.getOffsetHHmm +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.bigDecimalValue +import org.partiql.lang.eval.err +import org.partiql.lang.eval.intValue +import org.partiql.lang.eval.time.Time +import org.partiql.lang.eval.time.NANOS_PER_SECOND import org.partiql.lang.util.propertyValueMapOf -import org.partiql.lang.util.times import java.math.BigDecimal -import java.time.DateTimeException -import java.time.ZoneOffset /** * Creates a TIME ExprValue from the time fields hour, minute, second and optional timezone_minutes. 
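The MakeDateExprFunction and MakeTimeExprFunction hunks above are import-only changes. For orientation, make_date essentially delegates range validation to java.time and wraps the result through the ExprValueFactory, roughly as in this sketch (the helper name makeDateSketch is not from the patch; valueFactory.newDate(LocalDate) is the factory call used elsewhere in this change set):

    import org.partiql.lang.eval.ExprValue
    import org.partiql.lang.eval.ExprValueFactory
    import java.time.LocalDate

    // LocalDate.of throws DateTimeException for out-of-range fields (e.g. month 13);
    // the real builtin catches that and reports it as an evaluation error with an error context.
    fun makeDateSketch(valueFactory: ExprValueFactory, year: Int, month: Int, day: Int): ExprValue =
        valueFactory.newDate(LocalDate.of(year, month, day))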
diff --git a/lang/src/org/partiql/lang/eval/builtins/NullIfExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/NullIfExprFunction.kt index 5939b487a1..4ac272299a 100644 --- a/lang/src/org/partiql/lang/eval/builtins/NullIfExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/NullIfExprFunction.kt @@ -14,8 +14,12 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.eval.* +import org.partiql.lang.eval.ArityCheckingTrait +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprFunction +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.exprEquals /** * NullIf built in function. Takes in two arguments, expr1 and expr2, returns null if expr1 = expr2 otherwise returns expr1 diff --git a/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt index ee57053e23..c3beb75670 100644 --- a/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/SizeExprFunction.kt @@ -14,10 +14,17 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import com.amazon.ion.IonContainer +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.err +import org.partiql.lang.util.size /** * Built in function to return the size of a container type, i.e. size of Lists, Structs and Bags. This function diff --git a/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt index 3f7601d96c..1f24e2544f 100644 --- a/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt +++ b/lang/src/org/partiql/lang/eval/builtins/SubstringExprFunction.kt @@ -14,10 +14,14 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.eval.* -import org.partiql.lang.util.* -import java.lang.Integer.* +import org.partiql.lang.eval.Environment +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.NullPropagatingExprFunction +import org.partiql.lang.eval.errNoContext +import org.partiql.lang.eval.intValue +import org.partiql.lang.eval.stringValue /** * Built in function to return the substring of an existing string. 
This function @@ -101,11 +105,11 @@ internal class SubstringExprFunction(valueFactory: ExprValueFactory): NullPropag // calculate this before adjusting start position to account for negative startPosition val endPosition = when (quantity) { null -> codePointCount - else -> min(codePointCount, startPosition + quantity - 1) + else -> Integer.min(codePointCount, startPosition + quantity - 1) } // Clamp start indexes to values that make sense for java substring - val adjustedStartPosition = max(0, startPosition - 1) + val adjustedStartPosition = Integer.max(0, startPosition - 1) if (endPosition < adjustedStartPosition) { return valueFactory.newString("") diff --git a/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt b/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt index 4364e1aed2..187b21fc4c 100644 --- a/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt +++ b/lang/src/org/partiql/lang/eval/builtins/TimestampParser.kt @@ -14,15 +14,19 @@ package org.partiql.lang.eval.builtins -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* -import org.partiql.lang.eval.builtins.timestamp.* -import org.partiql.lang.util.* -import java.math.* -import java.time.* -import java.time.format.* -import java.time.temporal.* +import com.amazon.ion.Timestamp +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.eval.builtins.timestamp.FormatPattern +import org.partiql.lang.eval.builtins.timestamp.TimestampField +import org.partiql.lang.eval.errNoContext +import org.partiql.lang.util.propertyValueMapOf +import java.math.BigDecimal +import java.time.DateTimeException +import java.time.format.DateTimeFormatterBuilder +import java.time.temporal.ChronoField +import java.time.temporal.TemporalAccessor /** * Uses Java 8's DateTimeFormatter to parse an Ion Timestamp value. 
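The TimestampParser hunk above enumerates the java.time pieces the class actually uses (DateTimeFormatterBuilder, ChronoField, TemporalAccessor). A much-reduced sketch of that parse-then-assemble approach, using a plain java.time pattern instead of the class's own format-symbol translation:

    import com.amazon.ion.Timestamp
    import java.time.format.DateTimeFormatter
    import java.time.temporal.ChronoField

    // Parse the text into a TemporalAccessor, then read the fields needed for an Ion Timestamp.
    fun parseDaySketch(text: String): Timestamp {
        val accessor = DateTimeFormatter.ofPattern("yyyy-MM-dd").parse(text)
        return Timestamp.forDay(
            accessor.get(ChronoField.YEAR),
            accessor.get(ChronoField.MONTH_OF_YEAR),
            accessor.get(ChronoField.DAY_OF_MONTH))
    }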
diff --git a/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt b/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt
index 1a61024fb6..1e96ffc2d5 100644
--- a/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt
+++ b/lang/src/org/partiql/lang/eval/builtins/TimestampTemporalAccessor.kt
@@ -14,9 +14,13 @@
 package org.partiql.lang.eval.builtins
 
-import com.amazon.ion.*
-import java.math.*
-import java.time.temporal.*
+import com.amazon.ion.Timestamp
+import java.math.BigDecimal
+import java.time.temporal.ChronoField
+import java.time.temporal.IsoFields
+import java.time.temporal.TemporalAccessor
+import java.time.temporal.TemporalField
+import java.time.temporal.UnsupportedTemporalTypeException
 
 private val NANOS_PER_SECOND = 1_000_000_000L
 private val MILLIS_PER_SECOND = 1_000L
 
diff --git a/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt
index ceea8ee937..9d1eee1fd8 100644
--- a/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt
+++ b/lang/src/org/partiql/lang/eval/builtins/ToStringExprFunction.kt
@@ -14,14 +14,23 @@
 package org.partiql.lang.eval.builtins
 
-import com.amazon.ion.*
-import org.partiql.lang.errors.*
-import org.partiql.lang.eval.*
-import org.partiql.lang.util.*
+import com.amazon.ion.IonText
+import com.amazon.ion.IonTimestamp
+import org.partiql.lang.errors.ErrorCode
+import org.partiql.lang.errors.Property
+import org.partiql.lang.errors.PropertyValueMap
+import org.partiql.lang.eval.Environment
+import org.partiql.lang.eval.EvaluationException
+import org.partiql.lang.eval.ExprValue
+import org.partiql.lang.eval.ExprValueFactory
+import org.partiql.lang.eval.NullPropagatingExprFunction
+import org.partiql.lang.eval.errNoContext
+import org.partiql.lang.eval.timestampValue
+import org.partiql.lang.util.stringValue
 import java.lang.IllegalArgumentException
-import java.time.*
-import java.time.format.*
-import java.time.temporal.*
+import java.time.DateTimeException
+import java.time.format.DateTimeFormatter
+import java.time.temporal.UnsupportedTemporalTypeException
 
 class ToStringExprFunction(valueFactory: ExprValueFactory) : NullPropagatingExprFunction("to_string", 2, valueFactory) {
     override fun eval(env: Environment, args: List<ExprValue>): ExprValue {
@@ -51,7 +60,7 @@ class ToStringExprFunction(valueFactory: ExprValueFactory) : NullPropagatingExpr
     private fun validateArguments(args: List<ExprValue>) {
         when {
             args[0].ionValue !is IonTimestamp -> errNoContext("First argument of to_string is not a timestamp.", internal = false)
-            args[1].ionValue !is IonText -> errNoContext("Second argument of to_string is not a string.", internal = false)
+            args[1].ionValue !is IonText -> errNoContext("Second argument of to_string is not a string.", internal = false)
         }
     }
 
diff --git a/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt
index 8124064d45..2364ab0fb8 100644
--- a/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt
+++ b/lang/src/org/partiql/lang/eval/builtins/ToTimestampExprFunction.kt
@@ -14,11 +14,17 @@
 package org.partiql.lang.eval.builtins
 
-import com.amazon.ion.*
-import org.partiql.lang.errors.*
-import org.partiql.lang.eval.*
-import org.partiql.lang.util.*
-
+import com.amazon.ion.IonString
+import com.amazon.ion.Timestamp
+import org.partiql.lang.errors.ErrorCode
+import org.partiql.lang.errors.PropertyValueMap
+import org.partiql.lang.eval.Environment
+import org.partiql.lang.eval.EvaluationException
+import org.partiql.lang.eval.ExprValue
+import org.partiql.lang.eval.ExprValueFactory
+import org.partiql.lang.eval.NullPropagatingExprFunction
+import org.partiql.lang.eval.errNoContext
+import org.partiql.lang.util.stringValue
 
 /**
  * PartiQL function to convert a formatted string into an Ion Timestamp.
diff --git a/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt b/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt
index af4dced612..c50ed480ea 100644
--- a/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt
+++ b/lang/src/org/partiql/lang/eval/builtins/TrimExprFunction.kt
@@ -14,10 +14,12 @@
 package org.partiql.lang.eval.builtins
 
-import com.amazon.ion.*
-import org.partiql.lang.eval.*
-import org.partiql.lang.eval.builtins.TrimSpecification.*
-import org.partiql.lang.util.*
+import org.partiql.lang.eval.Environment
+import org.partiql.lang.eval.ExprValue
+import org.partiql.lang.eval.ExprValueFactory
+import org.partiql.lang.eval.NullPropagatingExprFunction
+import org.partiql.lang.eval.errNoContext
+import org.partiql.lang.eval.stringValue
 
 /**
  * From section 6.7 of SQL 92 spec:
@@ -50,7 +52,7 @@
  */
 internal class TrimExprFunction(valueFactory: ExprValueFactory) : NullPropagatingExprFunction("trim", 1..3, valueFactory) {
     private val DEFAULT_TO_REMOVE = " ".codePoints().toArray()
-    private val DEFAULT_SPECIFICATION = BOTH
+    private val DEFAULT_SPECIFICATION = TrimSpecification.BOTH
 
     private fun IntArray.leadingTrimOffset(toRemove: IntArray): Int {
         var offset = 0
@@ -88,9 +90,9 @@ internal class TrimExprFunction(valueFactory: ExprValueFactory) : NullPropagatin
         val (type, toRemove, string) = extractArguments(args)
 
         return when (type) {
-            BOTH, NONE -> valueFactory.newString(string.trim(toRemove))
-            LEADING -> valueFactory.newString(string.leadingTrim(toRemove))
-            TRAILING -> valueFactory.newString(string.trailingTrim(toRemove))
+            TrimSpecification.BOTH, TrimSpecification.NONE -> valueFactory.newString(string.trim(toRemove))
+            TrimSpecification.LEADING -> valueFactory.newString(string.leadingTrim(toRemove))
+            TrimSpecification.TRAILING -> valueFactory.newString(string.trailingTrim(toRemove))
         }
     }
 
@@ -109,7 +111,7 @@ internal class TrimExprFunction(valueFactory: ExprValueFactory) : NullPropagatin
 
                 val specification = TrimSpecification.from(args[0])
                 val toRemove = when(specification) {
-                    NONE -> args[0].codePoints()
+                    TrimSpecification.NONE -> args[0].codePoints()
                     else -> DEFAULT_TO_REMOVE
                 }
 
@@ -117,7 +119,7 @@ internal class TrimExprFunction(valueFactory: ExprValueFactory) : NullPropagatin
             }
             3 -> {
                 val specification = TrimSpecification.from(args[0])
-                if(specification == NONE) {
+                if(specification == TrimSpecification.NONE) {
                     errNoContext("'${args[0].stringValue()}' is an unknown trim specification, " +
                                  "valid vales: ${TrimSpecification.validValues}",
                                  internal = false)
diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt
index 2168dd4511..be3df8c2fd 100644
--- a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt
+++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatItem.kt
@@ -14,8 +14,6 @@
 package org.partiql.lang.eval.builtins.timestamp
 
-import org.partiql.lang.eval.builtins.timestamp.TimestampField.*
-
 /**
  * A item that is parsed from the format pattern. i.e. text or one of many symbols corresponding to a
  * field and its formatting options.
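
The TrimExprFunction hunks above qualify the TrimSpecification members and keep the code-point based trim helpers. A small, self-contained Kotlin sketch of that code-point trimming idea — the helper names mirror the diff, but this is an illustration under stated assumptions, not the class itself:

    // Count leading code points that appear in toRemove.
    fun IntArray.leadingTrimOffset(toRemove: IntArray): Int {
        var offset = 0
        while (offset < size && toRemove.contains(this[offset])) offset++
        return offset
    }

    // Count trailing code points that appear in toRemove.
    fun IntArray.trailingTrimOffset(toRemove: IntArray): Int {
        var offset = 0
        while (offset < size && toRemove.contains(this[size - offset - 1])) offset++
        return offset
    }

    fun main() {
        val toRemove = " ".codePoints().toArray()  // default: trim space code points
        val cps = "  hello  ".codePoints().toArray()
        val start = cps.leadingTrimOffset(toRemove)
        val end = cps.size - cps.trailingTrimOffset(toRemove)
        val sb = StringBuilder()
        for (i in start until end) sb.appendCodePoint(cps[i])
        println(sb.toString())                     // prints "hello"
    }
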
diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt index 91b2758964..394d747091 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/FormatPattern.kt @@ -14,11 +14,10 @@ package org.partiql.lang.eval.builtins.timestamp -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* -import org.partiql.lang.util.* - -import org.partiql.lang.eval.builtins.timestamp.TimestampField.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.util.propertyValueMapOf /** * Represents a parsed timestamp format pattern. @@ -170,7 +169,8 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L //NOTE: HOUR is not a valid precision for an Ion timestamp but when a format pattern's //leastSignificantField is HOUR, the minute field defaults to 00. if(hasOffset || hasAmPm) { - errIfMissingTimestampFields(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY) + errIfMissingTimestampFields(TimestampField.YEAR, TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, + TimestampField.HOUR_OF_DAY) } when (leastSignificantField) { @@ -178,18 +178,24 @@ internal class FormatPattern(val formatPatternString: String, val formatItems: L //If most precise field is null there are no format symbols corresponding to any timestamp fields. err("YEAR") } - YEAR -> { + TimestampField.YEAR -> { // the year field is the most coarse of the timestamp fields // it does not require any other fields to make a complete timestamp } - MONTH_OF_YEAR -> errIfMissingTimestampFields(YEAR) - DAY_OF_MONTH -> errIfMissingTimestampFields(YEAR, MONTH_OF_YEAR) - HOUR_OF_DAY -> errIfMissingTimestampFields(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH) - MINUTE_OF_HOUR -> errIfMissingTimestampFields(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY) - SECOND_OF_MINUTE -> errIfMissingTimestampFields(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR) - FRACTION_OF_SECOND -> errIfMissingTimestampFields(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE) - - OFFSET, AM_PM -> { + TimestampField.MONTH_OF_YEAR -> errIfMissingTimestampFields(TimestampField.YEAR) + TimestampField.DAY_OF_MONTH -> errIfMissingTimestampFields(TimestampField.YEAR, TimestampField.MONTH_OF_YEAR) + TimestampField.HOUR_OF_DAY -> errIfMissingTimestampFields(TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH) + TimestampField.MINUTE_OF_HOUR -> errIfMissingTimestampFields(TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY) + TimestampField.SECOND_OF_MINUTE -> errIfMissingTimestampFields(TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY, + TimestampField.MINUTE_OF_HOUR) + TimestampField. 
FRACTION_OF_SECOND -> errIfMissingTimestampFields(TimestampField.YEAR, + TimestampField.MONTH_OF_YEAR, TimestampField.DAY_OF_MONTH, TimestampField.HOUR_OF_DAY, + TimestampField.MINUTE_OF_HOUR, TimestampField.SECOND_OF_MINUTE) + + TimestampField.OFFSET, TimestampField.AM_PM -> { throw IllegalStateException("OFFSET, AM_PM should never be the least significant field!") } } diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt index 652a895e14..56889a5254 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternLexer.kt @@ -14,9 +14,11 @@ package org.partiql.lang.eval.builtins.timestamp -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.util.codePointSequence +import org.partiql.lang.util.propertyValueMapOf private const val NON_ESCAPED_TEXT = " /-,:." private const val SINGLE_QUOTE_CP = '\''.toInt() diff --git a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt index cf140d98ed..dfbf19625b 100644 --- a/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt +++ b/lang/src/org/partiql/lang/eval/builtins/timestamp/TimestampFormatPatternParser.kt @@ -14,9 +14,10 @@ package org.partiql.lang.eval.builtins.timestamp -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.eval.EvaluationException +import org.partiql.lang.util.propertyValueMapOf internal class TimestampFormatPatternParser { diff --git a/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt b/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt index e83654386c..8d61e10947 100644 --- a/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt +++ b/lang/src/org/partiql/lang/eval/io/DelimitedValues.kt @@ -14,12 +14,26 @@ package org.partiql.lang.eval.io -import com.amazon.ion.* +import com.amazon.ion.IonDecimal +import com.amazon.ion.IonException +import com.amazon.ion.IonFloat +import com.amazon.ion.IonInt +import com.amazon.ion.IonSystem +import com.amazon.ion.IonTimestamp +import com.amazon.ion.IonType +import com.amazon.ion.IonValue import org.apache.commons.csv.CSVFormat import org.apache.commons.csv.CSVParser import org.apache.commons.csv.CSVPrinter -import org.partiql.lang.eval.* -import org.partiql.lang.util.* +import org.partiql.lang.eval.BindingCase +import org.partiql.lang.eval.BindingName +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory +import org.partiql.lang.eval.StructOrdering +import org.partiql.lang.eval.namedValue +import org.partiql.lang.eval.orderedNames +import org.partiql.lang.eval.syntheticColumnName +import org.partiql.lang.util.stringValue import java.io.BufferedReader import java.io.Reader import java.io.Writer diff --git a/lang/src/org/partiql/lang/syntax/Parser.kt b/lang/src/org/partiql/lang/syntax/Parser.kt index d7106a0070..0e39368369 100644 --- a/lang/src/org/partiql/lang/syntax/Parser.kt +++ b/lang/src/org/partiql/lang/syntax/Parser.kt @@ -14,8 +14,8 
@@ package org.partiql.lang.syntax -import com.amazon.ion.* -import org.partiql.lang.ast.* +import com.amazon.ion.IonSexp +import org.partiql.lang.ast.ExprNode import org.partiql.lang.domains.PartiqlAst /** diff --git a/lang/src/org/partiql/lang/syntax/SqlLexer.kt b/lang/src/org/partiql/lang/syntax/SqlLexer.kt index c0b3d04040..d922d364f2 100644 --- a/lang/src/org/partiql/lang/syntax/SqlLexer.kt +++ b/lang/src/org/partiql/lang/syntax/SqlLexer.kt @@ -14,17 +14,15 @@ package org.partiql.lang.syntax -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.errors.ErrorCode.* -import org.partiql.lang.errors.Property.* -import org.partiql.lang.syntax.SqlLexer.LexType.* -import org.partiql.lang.syntax.SqlLexer.StateType.* -import org.partiql.lang.syntax.TokenType.* -import org.partiql.lang.syntax.TokenType.KEYWORD -import org.partiql.lang.util.* -import java.math.* - +import com.amazon.ion.IonException +import com.amazon.ion.IonSystem +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.util.bigDecimalOf +import org.partiql.lang.util.codePointSequence +import org.partiql.lang.util.seal +import java.math.BigInteger /** * Simple tokenizer for PartiQL. @@ -73,7 +71,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { val tokenType: TokenType? get() = null val lexType: LexType - get() = NONE + get() = LexType.NONE val replacement: Int get() = REPLACE_SAME @@ -89,7 +87,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { /** State node and corresponding state table. */ internal class TableState(override val stateType: StateType, override val tokenType: TokenType? = null, - override val lexType: LexType = NONE, + override val lexType: LexType = LexType.NONE, override val replacement: Int = REPLACE_SAME, var delegate: State = ERROR_STATE, setup: TableState.() -> Unit = { }) : State { @@ -121,7 +119,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { fun selfRepeatingDelegate(stateType: StateType, tokenType: TokenType? = null, - lexType: LexType = NONE) { + lexType: LexType = LexType.NONE) { delegate = object : State { override val stateType = stateType override val tokenType = tokenType @@ -133,7 +131,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { fun delta(chars: String, stateType: StateType, tokenType: TokenType? = null, - lexType: LexType = NONE, + lexType: LexType = LexType.NONE, replacement: Int = REPLACE_SAME, delegate: State = this, setup: TableState.(String) -> Unit = { }): TableState { @@ -199,83 +197,83 @@ class SqlLexer(private val ion: IonSystem) : Lexer { private val REPLACE_NOTHING = -2 /** Synthetic state for EOF to trigger a flush of the last token. */ - private val EOF_STATE = RepeatingState(END) + private val EOF_STATE = RepeatingState(StateType.END) /** Error state. */ - private val ERROR_STATE = RepeatingState(ERROR) + private val ERROR_STATE = RepeatingState(StateType.ERROR) /** Initial state. 
*/ - private val INITIAL_STATE = TableState(INITIAL) { + private val INITIAL_STATE = TableState(StateType.INITIAL) { val initialState = this - delta("(", START_AND_TERMINAL, LEFT_PAREN) - delta(")", START_AND_TERMINAL, RIGHT_PAREN) - delta("[", START_AND_TERMINAL, LEFT_BRACKET) - delta("]", START_AND_TERMINAL, RIGHT_BRACKET) - delta("{", START_AND_TERMINAL, LEFT_CURLY) - delta("}", START_AND_TERMINAL, RIGHT_CURLY) - delta(":", START_AND_TERMINAL, COLON) - delta(",", START_AND_TERMINAL, COMMA) - delta("*", START_AND_TERMINAL, STAR) - delta(";", START_AND_TERMINAL, SEMICOLON) - delta("?", START_AND_TERMINAL, QUESTION_MARK) - - delta(NON_OVERLOADED_OPERATOR_CHARS, START_AND_TERMINAL, OPERATOR) - - delta("|", START) { - delta("|", TERMINAL, OPERATOR, delegate = initialState) + delta("(", StateType.START_AND_TERMINAL, TokenType.LEFT_PAREN) + delta(")", StateType.START_AND_TERMINAL, TokenType.RIGHT_PAREN) + delta("[", StateType.START_AND_TERMINAL, TokenType.LEFT_BRACKET) + delta("]", StateType.START_AND_TERMINAL, TokenType.RIGHT_BRACKET) + delta("{", StateType.START_AND_TERMINAL, TokenType.LEFT_CURLY) + delta("}", StateType.START_AND_TERMINAL, TokenType.RIGHT_CURLY) + delta(":", StateType.START_AND_TERMINAL, TokenType.COLON) + delta(",", StateType.START_AND_TERMINAL, TokenType.COMMA) + delta("*", StateType.START_AND_TERMINAL, TokenType.STAR) + delta(";", StateType.START_AND_TERMINAL, TokenType.SEMICOLON) + delta("?", StateType.START_AND_TERMINAL, TokenType.QUESTION_MARK) + + delta(NON_OVERLOADED_OPERATOR_CHARS, StateType.START_AND_TERMINAL, TokenType.OPERATOR) + + delta("|", StateType.START) { + delta("|", StateType.TERMINAL, TokenType.OPERATOR, delegate = initialState) } - delta("!", START) { - delta("=", TERMINAL, OPERATOR, delegate = initialState) + delta("!", StateType.START) { + delta("=", StateType.TERMINAL, TokenType.OPERATOR, delegate = initialState) } - delta("<", START_AND_TERMINAL, OPERATOR) { - delta("=", TERMINAL, OPERATOR, delegate = initialState) - delta(">", TERMINAL, OPERATOR, delegate = initialState) - delta("<", TERMINAL, LEFT_DOUBLE_ANGLE_BRACKET, delegate = initialState) + delta("<", StateType.START_AND_TERMINAL, TokenType.OPERATOR) { + delta("=", StateType.TERMINAL, TokenType.OPERATOR, delegate = initialState) + delta(">", StateType.TERMINAL, TokenType.OPERATOR, delegate = initialState) + delta("<", StateType.TERMINAL, TokenType.LEFT_DOUBLE_ANGLE_BRACKET, delegate = initialState) } - delta(">", START_AND_TERMINAL, OPERATOR) { - delta("=", TERMINAL, OPERATOR, delegate = initialState) - delta(">", TERMINAL, RIGHT_DOUBLE_ANGLE_BRACKET, delegate = initialState) + delta(">", StateType.START_AND_TERMINAL, TokenType.OPERATOR) { + delta("=", StateType.TERMINAL, TokenType.OPERATOR, delegate = initialState) + delta(">", StateType.TERMINAL, TokenType.RIGHT_DOUBLE_ANGLE_BRACKET, delegate = initialState) } - delta(IDENT_START_CHARS, START_AND_TERMINAL, IDENTIFIER) { - delta(IDENT_CONTINUE_CHARS, TERMINAL, IDENTIFIER) + delta(IDENT_START_CHARS, StateType.START_AND_TERMINAL, TokenType.IDENTIFIER) { + delta(IDENT_CONTINUE_CHARS, StateType.TERMINAL, TokenType.IDENTIFIER) } fun TableState.deltaDecimalInteger(stateType: StateType, lexType: LexType, setup: TableState.(String) -> Unit = { }): Unit { - delta(DIGIT_CHARS, stateType, LITERAL, lexType, delegate = initialState) { - delta(DIGIT_CHARS, TERMINAL, LITERAL, lexType) + delta(DIGIT_CHARS, stateType, TokenType.LITERAL, lexType, delegate = initialState) { + delta(DIGIT_CHARS, StateType.TERMINAL, TokenType.LITERAL, lexType) setup(it) } } fun 
TableState.deltaDecimalFraction(setup: TableState.(String) -> Unit = { }): Unit { - delta(".", TERMINAL, LITERAL, DECIMAL) { - deltaDecimalInteger(TERMINAL, DECIMAL, setup) + delta(".", StateType.TERMINAL, TokenType.LITERAL, LexType.DECIMAL) { + deltaDecimalInteger(StateType.TERMINAL, LexType.DECIMAL, setup) } } fun TableState.deltaExponent(setup: TableState.(String) -> Unit = { }): Unit { - delta(E_NOTATION_CHARS, INCOMPLETE, delegate = ERROR_STATE) { - delta(SIGN_CHARS, INCOMPLETE, delegate = ERROR_STATE) { - deltaDecimalInteger(TERMINAL, DECIMAL, setup) + delta(E_NOTATION_CHARS, StateType.INCOMPLETE, delegate = ERROR_STATE) { + delta(SIGN_CHARS, StateType.INCOMPLETE, delegate = ERROR_STATE) { + deltaDecimalInteger(StateType.TERMINAL, LexType.DECIMAL, setup) } - deltaDecimalInteger(TERMINAL, DECIMAL, setup) + deltaDecimalInteger(StateType.TERMINAL, LexType.DECIMAL, setup) } } fun TableState.deltaNumber(stateType: StateType) { - deltaDecimalInteger(stateType, INTEGER) { + deltaDecimalInteger(stateType, LexType.INTEGER) { deltaDecimalFraction { deltaExponent { } } deltaExponent { } } when (stateType) { - START_AND_TERMINAL -> { + StateType.START_AND_TERMINAL -> { // at the top-level we need to support dot as a special - delta(".", START_AND_TERMINAL, DOT) { - deltaDecimalInteger(TERMINAL, DECIMAL) { + delta(".", StateType.START_AND_TERMINAL, TokenType.DOT) { + deltaDecimalInteger(StateType.TERMINAL, LexType.DECIMAL) { deltaExponent { } } } @@ -288,108 +286,108 @@ class SqlLexer(private val ion: IonSystem) : Lexer { } } - deltaNumber(START_AND_TERMINAL) + deltaNumber(StateType.START_AND_TERMINAL) fun TableState.deltaQuote(quoteChar: String, tokenType: TokenType, lexType: LexType): Unit { - delta(quoteChar, START, replacement = REPLACE_NOTHING) { - selfRepeatingDelegate(INCOMPLETE) + delta(quoteChar, StateType.START, replacement = REPLACE_NOTHING) { + selfRepeatingDelegate(StateType.INCOMPLETE) val quoteState = this - delta(quoteChar, TERMINAL, tokenType, lexType = lexType, replacement = REPLACE_NOTHING, delegate = initialState) { - delta(quoteChar, INCOMPLETE, delegate = quoteState) + delta(quoteChar, StateType.TERMINAL, tokenType, lexType = lexType, replacement = REPLACE_NOTHING, delegate = initialState) { + delta(quoteChar, StateType.INCOMPLETE, delegate = quoteState) } } } - deltaQuote(SINGLE_QUOTE_CHARS, LITERAL, SQ_STRING) - deltaQuote(DOUBLE_QUOTE_CHARS, QUOTED_IDENTIFIER, DQ_STRING) + deltaQuote(SINGLE_QUOTE_CHARS, TokenType.LITERAL, LexType.SQ_STRING) + deltaQuote(DOUBLE_QUOTE_CHARS, TokenType.QUOTED_IDENTIFIER, LexType.DQ_STRING) // Ion literals - very partial lexing of Ion to support nested back-tick // in Ion strings/symbols/comments - delta(BACKTICK_CHARS, START, replacement = REPLACE_NOTHING) { - selfRepeatingDelegate(INCOMPLETE) + delta(BACKTICK_CHARS, StateType.START, replacement = REPLACE_NOTHING) { + selfRepeatingDelegate(StateType.INCOMPLETE) val quoteState = this - delta("/", INCOMPLETE) { - delta("/", INCOMPLETE) { + delta("/", StateType.INCOMPLETE) { + delta("/", StateType.INCOMPLETE) { val ionCommentState = this - selfRepeatingDelegate(INCOMPLETE) - delta(BACKTICK_CHARS, INCOMPLETE, delegate = ionCommentState) - delta(NL_WHITESPACE_CHARS, INCOMPLETE, delegate = quoteState) + selfRepeatingDelegate(StateType.INCOMPLETE) + delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionCommentState) + delta(NL_WHITESPACE_CHARS, StateType.INCOMPLETE, delegate = quoteState) } - delta("*", INCOMPLETE) { + delta("*", StateType.INCOMPLETE) { val ionCommentState = this - 
selfRepeatingDelegate(INCOMPLETE) - delta(BACKTICK_CHARS, INCOMPLETE, delegate = ionCommentState) - delta("*", INCOMPLETE) { - delta("/", INCOMPLETE, delegate = quoteState) + selfRepeatingDelegate(StateType.INCOMPLETE) + delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionCommentState) + delta("*", StateType.INCOMPLETE) { + delta("/", StateType.INCOMPLETE, delegate = quoteState) } } } - delta(DOUBLE_QUOTE_CHARS, INCOMPLETE) { + delta(DOUBLE_QUOTE_CHARS, StateType.INCOMPLETE) { val ionStringState = this - selfRepeatingDelegate(INCOMPLETE) + selfRepeatingDelegate(StateType.INCOMPLETE) - delta("\\", INCOMPLETE) { - delta(DOUBLE_QUOTE_CHARS, INCOMPLETE, delegate = ionStringState) + delta("\\", StateType.INCOMPLETE) { + delta(DOUBLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = ionStringState) } - delta(BACKTICK_CHARS, INCOMPLETE, delegate = ionStringState) - delta(DOUBLE_QUOTE_CHARS, INCOMPLETE, delegate = quoteState) + delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionStringState) + delta(DOUBLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = quoteState) } - delta(SINGLE_QUOTE_CHARS, INCOMPLETE) { + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE) { val ionStringState = this - selfRepeatingDelegate(INCOMPLETE) + selfRepeatingDelegate(StateType.INCOMPLETE) - delta("\\", INCOMPLETE) { - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = ionStringState) + delta("\\", StateType.INCOMPLETE) { + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = ionStringState) } - delta(BACKTICK_CHARS, INCOMPLETE, delegate = ionStringState) - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = quoteState) { - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = ionStringState) { + delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionStringState) + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = quoteState) { + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = ionStringState) { val ionLongStringState = this - selfRepeatingDelegate(INCOMPLETE) + selfRepeatingDelegate(StateType.INCOMPLETE) - delta("\\", INCOMPLETE) { - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = ionLongStringState) + delta("\\", StateType.INCOMPLETE) { + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = ionLongStringState) } - delta(BACKTICK_CHARS, INCOMPLETE, delegate = ionLongStringState) - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = ionLongStringState) { - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = ionLongStringState) { - delta(SINGLE_QUOTE_CHARS, INCOMPLETE, delegate = quoteState) + delta(BACKTICK_CHARS, StateType.INCOMPLETE, delegate = ionLongStringState) + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = ionLongStringState) { + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = ionLongStringState) { + delta(SINGLE_QUOTE_CHARS, StateType.INCOMPLETE, delegate = quoteState) } } } } } - delta("{", INCOMPLETE) { - delta("{", INCOMPLETE) { - selfRepeatingDelegate(INCOMPLETE) - delta("}", INCOMPLETE) { - delta("}", INCOMPLETE, delegate = quoteState) + delta("{", StateType.INCOMPLETE) { + delta("{", StateType.INCOMPLETE) { + selfRepeatingDelegate(StateType.INCOMPLETE) + delta("}", StateType.INCOMPLETE) { + delta("}", StateType.INCOMPLETE, delegate = quoteState) } } } - delta(BACKTICK_CHARS, TERMINAL, TokenType.ION_LITERAL, LexType.ION_LITERAL, replacement = REPLACE_NOTHING, delegate = initialState) + delta(BACKTICK_CHARS, StateType.TERMINAL, TokenType.ION_LITERAL, LexType.ION_LITERAL, replacement = REPLACE_NOTHING, delegate = initialState) } - delta(ALL_WHITESPACE_CHARS, 
START_AND_TERMINAL, null, WHITESPACE) + delta(ALL_WHITESPACE_CHARS, StateType.START_AND_TERMINAL, null, LexType.WHITESPACE) // block comment and divide operator - delta("/", START_AND_TERMINAL, OPERATOR) { - delta("*", INCOMPLETE) { - selfRepeatingDelegate(INCOMPLETE) - delta("*", INCOMPLETE) { - delta("/", TERMINAL, null, WHITESPACE, delegate = initialState) + delta("/", StateType.START_AND_TERMINAL, TokenType.OPERATOR) { + delta("*", StateType.INCOMPLETE) { + selfRepeatingDelegate(StateType.INCOMPLETE) + delta("*", StateType.INCOMPLETE) { + delta("/", StateType.TERMINAL, null, LexType.WHITESPACE, delegate = initialState) } } } // line comment, subtraction operator, and signed positive integer - delta("-", START_AND_TERMINAL, OPERATOR) { + delta("-", StateType.START_AND_TERMINAL, TokenType.OPERATOR) { // inline comments don't need a special terminator before EOF - delta("-", TERMINAL, null, WHITESPACE) { - selfRepeatingDelegate(TERMINAL, null, WHITESPACE) - delta(NL_WHITESPACE_CHARS, TERMINAL, null, WHITESPACE, delegate = initialState) + delta("-", StateType.TERMINAL, null, LexType.WHITESPACE) { + selfRepeatingDelegate(StateType.TERMINAL, null, LexType.WHITESPACE) + delta(NL_WHITESPACE_CHARS, StateType.TERMINAL, null, LexType.WHITESPACE, delegate = initialState) } } @@ -409,9 +407,9 @@ class SqlLexer(private val ion: IonSystem) : Lexer { */ private fun makePropertyBag(tokenString: String, tracker: PositionTracker): PropertyValueMap { val pvmap = PropertyValueMap() - pvmap[LINE_NUMBER] = tracker.line - pvmap[COLUMN_NUMBER] = tracker.col - pvmap[TOKEN_STRING] = tokenString + pvmap[Property.LINE_NUMBER] = tracker.line + pvmap[Property.COLUMN_NUMBER] = tracker.col + pvmap[Property.TOKEN_STRING] = tokenString return pvmap } @@ -434,16 +432,16 @@ class SqlLexer(private val ion: IonSystem) : Lexer { tokenCodePointCount++; fun errInvalidChar(): Nothing = - throw LexerException(errorCode = LEXER_INVALID_CHAR, errorContext = makePropertyBag(repr(cp), tracker)) + throw LexerException(errorCode = ErrorCode.LEXER_INVALID_CHAR, errorContext = makePropertyBag(repr(cp), tracker)) fun errInvalidOperator(operator: String): Nothing = - throw LexerException(errorCode = LEXER_INVALID_OPERATOR, errorContext = makePropertyBag(operator, tracker)) + throw LexerException(errorCode = ErrorCode.LEXER_INVALID_OPERATOR, errorContext = makePropertyBag(operator, tracker)) fun errInvalidLiteral(literal: String): Nothing = - throw LexerException(errorCode = LEXER_INVALID_LITERAL, errorContext = makePropertyBag(literal, tracker)) + throw LexerException(errorCode = ErrorCode.LEXER_INVALID_LITERAL, errorContext = makePropertyBag(literal, tracker)) fun errInvalidIonLiteral(literal: String, cause: IonException): Nothing = - throw LexerException(errorCode = LEXER_INVALID_ION_LITERAL, + throw LexerException(errorCode = ErrorCode.LEXER_INVALID_ION_LITERAL, errorContext = makePropertyBag(literal, tracker), cause = cause) @@ -459,89 +457,89 @@ class SqlLexer(private val ion: IonSystem) : Lexer { val nextType = next.stateType when { - nextType == ERROR -> errInvalidChar() + nextType == StateType.ERROR -> errInvalidChar() nextType.beginsToken -> { // we can only start a token if we've properly ended another one. 
- if (currType != INITIAL && !currType.endsToken) { + if (currType != StateType.INITIAL && !currType.endsToken) { errInvalidChar() } - if (currType.endsToken && curr.lexType != WHITESPACE) { + if (currType.endsToken && curr.lexType != LexType.WHITESPACE) { // flush out the previous token val text = buffer.toString() var tokenType = curr.tokenType!! val ionValue = when (tokenType) { - OPERATOR -> { + TokenType.OPERATOR -> { val unaliased = OPERATOR_ALIASES[text] ?: text when (unaliased) { in ALL_OPERATORS -> ion.newSymbol(unaliased) else -> errInvalidOperator(unaliased) } } - IDENTIFIER -> { + TokenType.IDENTIFIER -> { val lower = text.toLowerCase() when { - curr.lexType == DQ_STRING -> ion.newSymbol(text) + curr.lexType == LexType.DQ_STRING -> ion.newSymbol(text) lower in ALL_SINGLE_LEXEME_OPERATORS -> { // an operator that looks like a keyword - tokenType = OPERATOR + tokenType = TokenType.OPERATOR ion.newSymbol(lower) } lower == "as" -> { // AS token - tokenType = AS + tokenType = TokenType.AS ion.newSymbol(lower) } lower == "at" -> { // AS token - tokenType = AT + tokenType = TokenType.AT ion.newSymbol(lower) } lower == "by" -> { // BY token - tokenType = BY + tokenType = TokenType.BY ion.newSymbol(lower) } lower == "null" -> { // literal null - tokenType = NULL + tokenType = TokenType.NULL ion.newNull() } lower == "missing" -> { // special literal for MISSING - tokenType = MISSING + tokenType = TokenType.MISSING ion.newNull() } lower == "for" -> { // used as an argument delimiter for substring - tokenType = FOR + tokenType = TokenType.FOR ion.newSymbol(lower) } lower == "asc" -> { - tokenType = ASC + tokenType = TokenType.ASC ion.newSymbol(lower) } lower == "desc" -> { - tokenType = DESC + tokenType = TokenType.DESC ion.newSymbol(lower) } lower in BOOLEAN_KEYWORDS -> { // literal boolean - tokenType = LITERAL + tokenType = TokenType.LITERAL ion.newBool(lower == "true") } lower in KEYWORDS -> { // unquoted identifier that is a keyword - tokenType = KEYWORD + tokenType = TokenType.KEYWORD ion.newSymbol(KEYWORD_ALIASES[lower] ?: lower) } else -> ion.newSymbol(text) } } - LITERAL -> when (curr.lexType) { - SQ_STRING -> ion.newString(text) - INTEGER -> ion.newInt(BigInteger(text, 10)) - DECIMAL -> try { + TokenType.LITERAL -> when (curr.lexType) { + LexType.SQ_STRING -> ion.newString(text) + LexType.INTEGER -> ion.newInt(BigInteger(text, 10)) + LexType.DECIMAL -> try { ion.newDecimal(bigDecimalOf(text)) } catch (e: NumberFormatException) { @@ -560,7 +558,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { errInvalidIonLiteral(text, e) } } - QUESTION_MARK -> { + TokenType.QUESTION_MARK -> { ion.newInt(++parameterCt) } else -> ion.newSymbol(text) @@ -588,7 +586,7 @@ class SqlLexer(private val ion: IonSystem) : Lexer { } // if next state is the EOF marker add it to `tokens`. 
- if (next.stateType == END) tokens.add( + if (next.stateType == StateType.END) tokens.add( Token( type = TokenType.EOF, value = ion.newSymbol("EOF"), diff --git a/lang/src/org/partiql/lang/syntax/SqlParser.kt b/lang/src/org/partiql/lang/syntax/SqlParser.kt index 45ab5de464..639c3f5e34 100644 --- a/lang/src/org/partiql/lang/syntax/SqlParser.kt +++ b/lang/src/org/partiql/lang/syntax/SqlParser.kt @@ -14,27 +14,59 @@ package org.partiql.lang.syntax -import com.amazon.ion.* -import com.amazon.ionelement.api.* -import org.partiql.lang.ast.* +import com.amazon.ion.IonSexp +import com.amazon.ion.IonSystem +import com.amazon.ionelement.api.emptyMetaContainer +import com.amazon.ionelement.api.ionInt +import com.amazon.ionelement.api.ionString +import com.amazon.ionelement.api.metaContainerOf +import com.amazon.ionelement.api.toIonElement +import org.partiql.lang.ast.AstSerializer +import org.partiql.lang.ast.AstVersion +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.IonElementMetaContainer +import org.partiql.lang.ast.IsCountStarMeta +import org.partiql.lang.ast.IsImplictJoinMeta +import org.partiql.lang.ast.IsIonLiteralMeta +import org.partiql.lang.ast.LegacyLogicalNotMeta +import org.partiql.lang.ast.Meta +import org.partiql.lang.ast.NAryOp +import org.partiql.lang.ast.SourceLocationMeta +import org.partiql.lang.ast.SqlDataType +import org.partiql.lang.ast.toExprNode import org.partiql.lang.domains.PartiqlAst import org.partiql.lang.errors.ErrorCode -import org.partiql.lang.errors.ErrorCode.* import org.partiql.lang.errors.Property -import org.partiql.lang.errors.Property.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.time.* -import org.partiql.lang.syntax.SqlParser.AliasSupportType.* -import org.partiql.lang.syntax.SqlParser.ArgListMode.* -import org.partiql.lang.syntax.SqlParser.ParseType.* -import org.partiql.lang.syntax.TokenType.* -import org.partiql.lang.syntax.TokenType.KEYWORD -import org.partiql.lang.util.* +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.eval.time.MAX_PRECISION_FOR_TIME +import org.partiql.lang.util.DATE_PATTERN_REGEX +import org.partiql.lang.util.atomFromHead +import org.partiql.lang.util.checkThreadInterrupted +import org.partiql.lang.util.err +import org.partiql.lang.util.errExpectedTokenType +import org.partiql.lang.util.genericTimeRegex +import org.partiql.lang.util.getPrecisionFromTimeString +import org.partiql.lang.util.head +import org.partiql.lang.util.ionValue +import org.partiql.lang.util.isNumeric +import org.partiql.lang.util.isText +import org.partiql.lang.util.isUnsignedInteger +import org.partiql.lang.util.longValue +import org.partiql.lang.util.numberValue +import org.partiql.lang.util.onlyEndOfStatement +import org.partiql.lang.util.stringValue +import org.partiql.lang.util.tail +import org.partiql.lang.util.tailExpectedKeyword +import org.partiql.lang.util.tailExpectedToken +import org.partiql.lang.util.timeWithoutTimeZoneRegex +import org.partiql.lang.util.unaryMinus import org.partiql.pig.runtime.SymbolPrimitive import java.time.LocalDate -import java.time.format.DateTimeFormatter.* -import java.time.* +import java.time.LocalTime +import java.time.OffsetTime import java.time.format.DateTimeFormatter +import java.time.format.DateTimeFormatter.ISO_LOCAL_DATE +import java.time.format.DateTimeFormatter.ISO_TIME import java.time.format.DateTimeParseException import java.time.temporal.Temporal @@ -166,22 +198,22 @@ class SqlParser(private val ion: IonSystem) : Parser { fun 
deriveExpected(expectedType1: TokenType, expectedType2: TokenType): Pair = if (expectedType1 != this.remaining.head?.type && expectedType2 != this.remaining.head?.type) { val pvmap = PropertyValueMap() - pvmap[EXPECTED_TOKEN_TYPE_1_OF_2] = expectedType1 - pvmap[EXPECTED_TOKEN_TYPE_2_OF_2] = expectedType2 - this.remaining.err("Expected $type", PARSE_EXPECTED_2_TOKEN_TYPES, pvmap) + pvmap[Property.EXPECTED_TOKEN_TYPE_1_OF_2] = expectedType1 + pvmap[Property.EXPECTED_TOKEN_TYPE_2_OF_2] = expectedType2 + this.remaining.err("Expected $type", ErrorCode.PARSE_EXPECTED_2_TOKEN_TYPES, pvmap) } else { Pair(copy(remaining = this.remaining.tail), this.remaining.head!!) } fun deriveExpectedKeyword(keyword: String): ParseNode = derive { tailExpectedKeyword(keyword) } - val isNumericLiteral = type == ATOM && when (token?.type) { - LITERAL, ION_LITERAL -> token.value?.isNumeric ?: false + val isNumericLiteral = type == ParseType.ATOM && when (token?.type) { + TokenType.LITERAL, TokenType.ION_LITERAL -> token.value?.isNumeric ?: false else -> false } fun numberValue(): Number = token?.value?.numberValue() - ?: unsupported("Could not interpret token as number", PARSE_EXPECTED_NUMBER) + ?: unsupported("Could not interpret token as number", ErrorCode.PARSE_EXPECTED_NUMBER) fun unsupported(message: String, errorCode: ErrorCode, errorContext: PropertyValueMap = PropertyValueMap()): Nothing = remaining.err(message, errorCode, errorContext) @@ -217,17 +249,20 @@ class SqlParser(private val ion: IonSystem) : Parser { //*************************************** private fun ParseNode.toAstStatement(): PartiqlAst.Statement { return when (type) { - ATOM, LIST, BAG, STRUCT, UNARY, BINARY, TERNARY, CAST, CALL, CALL_AGG, - CALL_DISTINCT_AGG, CALL_AGG_WILDCARD, PATH, PARAMETER, CASE, SELECT_LIST, - SELECT_VALUE, PIVOT, DATE, TIME, TIME_WITH_TIME_ZONE -> PartiqlAst.build { query(toAstExpr(), getMetas()) } + ParseType.ATOM, ParseType.LIST, ParseType.BAG, ParseType.STRUCT, ParseType.UNARY, ParseType.BINARY, + ParseType.TERNARY, ParseType.CAST, ParseType.CALL, ParseType.CALL_AGG, ParseType.CALL_DISTINCT_AGG, + ParseType.CALL_AGG_WILDCARD, ParseType.PATH, ParseType.PARAMETER, ParseType.CASE, ParseType.SELECT_LIST, + ParseType.SELECT_VALUE, ParseType.PIVOT, ParseType.DATE, ParseType.TIME, + ParseType.TIME_WITH_TIME_ZONE -> PartiqlAst.build { query(toAstExpr(), getMetas()) } - FROM, INSERT, INSERT_VALUE, SET, UPDATE, REMOVE, DELETE, DML_LIST -> toAstDml() + ParseType.FROM, ParseType.INSERT, ParseType.INSERT_VALUE, ParseType.SET, ParseType.UPDATE, ParseType.REMOVE, + ParseType.DELETE, ParseType.DML_LIST -> toAstDml() - CREATE_TABLE, DROP_TABLE, CREATE_INDEX, DROP_INDEX -> toAstDdl() - - EXEC -> toAstExec() + ParseType.CREATE_TABLE, ParseType.DROP_TABLE, ParseType.CREATE_INDEX, ParseType.DROP_INDEX -> toAstDdl() - else -> unsupported("Unsupported syntax for $type", PARSE_UNSUPPORTED_SYNTAX) + ParseType.EXEC -> toAstExec() + + else -> unsupported("Unsupported syntax for $type", ErrorCode.PARSE_UNSUPPORTED_SYNTAX) } } @@ -237,19 +272,19 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (type) { - ATOM -> when (token?.type){ - LITERAL, NULL, TRIM_SPECIFICATION, DATE_PART -> lit(token.value!!.toIonElement(), metas) - ION_LITERAL -> lit(token.value!!.toIonElement(), metas + metaToIonMetaContainer(IsIonLiteralMeta.instance)) - MISSING -> missing(metas) - QUOTED_IDENTIFIER -> id(token.text!!, caseSensitive(), unqualified(), metas) - IDENTIFIER -> id(token.text!!, caseInsensitive(), unqualified(), 
metas) + ParseType.ATOM -> when (token?.type){ + TokenType.LITERAL, TokenType.NULL, TokenType.TRIM_SPECIFICATION,TokenType. DATE_PART -> lit(token.value!!.toIonElement(), metas) + TokenType.ION_LITERAL -> lit(token.value!!.toIonElement(), metas + metaToIonMetaContainer(IsIonLiteralMeta.instance)) + TokenType.MISSING -> missing(metas) + TokenType.QUOTED_IDENTIFIER -> id(token.text!!, caseSensitive(), unqualified(), metas) + TokenType.IDENTIFIER -> id(token.text!!, caseInsensitive(), unqualified(), metas) else -> errMalformedParseTree("Unsupported atom token type ${token?.type}") } - LIST -> list(children.map { it.toAstExpr() }, metas) - BAG -> bag(children.map { it.toAstExpr() }, metas) - STRUCT -> { + ParseType.LIST -> list(children.map { it.toAstExpr() }, metas) + ParseType.BAG -> bag(children.map { it.toAstExpr() }, metas) + ParseType.STRUCT -> { val fields = children.map { - if (it.type != MEMBER) { + if (it.type != ParseType.MEMBER) { errMalformedParseTree("Expected MEMBER node as direct descendant of a STRUCT node but instead found ${it.type}") } if (it.children.size != 2) { @@ -261,7 +296,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } struct(fields, metas) } - UNARY, BINARY, TERNARY -> { + ParseType.UNARY, ParseType.BINARY, ParseType.TERNARY -> { when (token!!.text) { "is" -> isType(children[0].toAstExpr(), children[1].toAstType(), metas) "is_not" -> not( @@ -282,8 +317,8 @@ class SqlParser(private val ion: IonSystem) : Parser { val childToken = childNode.token ?: errMalformedParseTree("@ node does not have a token") when (childToken.type) { - QUOTED_IDENTIFIER -> id(childToken.text!!, caseSensitive(), localsFirst(), childNode.getMetas()) - IDENTIFIER -> id(childToken.text!!, caseInsensitive(), localsFirst(), childNode.getMetas()) + TokenType.QUOTED_IDENTIFIER -> id(childToken.text!!, caseSensitive(), localsFirst(), childNode.getMetas()) + TokenType.IDENTIFIER -> id(childToken.text!!, caseInsensitive(), localsFirst(), childNode.getMetas()) else -> errMalformedParseTree("Unexpected child node token type of @ operator node $childToken") } } @@ -303,8 +338,8 @@ class SqlParser(private val ion: IonSystem) : Parser { } } } - CAST -> cast(children[0].toAstExpr(), children[1].toAstType(), metas) - CALL -> { + ParseType.CAST -> cast(children[0].toAstExpr(), children[1].toAstType(), metas) + ParseType.CALL -> { when (val funcName = token?.text!!.toLowerCase()) { // special case--list/sexp/bag "functions" are intrinsic to the literal form "sexp" -> sexp(children.map { it.toAstExpr()}, metas) @@ -319,16 +354,16 @@ class SqlParser(private val ion: IonSystem) : Parser { } } } - CALL_AGG -> { + ParseType.CALL_AGG -> { val funcName = SymbolPrimitive(token?.text!!.toLowerCase(), emptyMetaContainer()) callAgg_(all(), funcName, children[0].toAstExpr(), metas) } - CALL_DISTINCT_AGG -> { + ParseType.CALL_DISTINCT_AGG -> { val funcName = SymbolPrimitive(token?.text!!.toLowerCase(), emptyMetaContainer()) callAgg_(distinct(), funcName, children[0].toAstExpr(), metas) } - CALL_AGG_WILDCARD -> { - if(token!!.type != KEYWORD || token.keywordText != "count") { + ParseType.CALL_AGG_WILDCARD -> { + if(token!!.type != TokenType.KEYWORD || token.keywordText != "count") { errMalformedParseTree("only COUNT can be used with a wildcard") } // Should only get the [SourceLocationMeta] if present, not any other metas. 
@@ -338,49 +373,51 @@ class SqlParser(private val ion: IonSystem) : Parser { val symbolicPrimitive = SymbolPrimitive("count", srcLocationMetaOnly) callAgg_(all(), symbolicPrimitive, lit, metas + metaToIonMetaContainer(IsCountStarMeta.instance)) } - PATH -> { + ParseType.PATH -> { val rootExpr = children[0].toAstExpr() val pathComponents = children.drop(1).map { when(it.type) { - PATH_DOT -> { + ParseType.PATH_DOT -> { if(it.children.size != 1) { errMalformedParseTree("Unexpected number of child elements in PATH_DOT ParseNode") } val atomParseNode = it.children[0] val atomMetas = atomParseNode.getMetas() when (atomParseNode.type) { - CASE_SENSITIVE_ATOM, CASE_INSENSITIVE_ATOM -> { + ParseType.CASE_SENSITIVE_ATOM, ParseType.CASE_INSENSITIVE_ATOM -> { val lit = lit(ionString(atomParseNode.token?.text!!), atomMetas) - val caseSensitivity = if (atomParseNode.type == CASE_SENSITIVE_ATOM) caseSensitive() else caseInsensitive() + val caseSensitivity = if (atomParseNode.type == ParseType.CASE_SENSITIVE_ATOM) + caseSensitive() else caseInsensitive() pathExpr(lit, caseSensitivity) } - PATH_UNPIVOT -> pathUnpivot(atomMetas) + ParseType.PATH_UNPIVOT -> pathUnpivot(atomMetas) else -> errMalformedParseTree("Unsupported child path node of PATH_DOT") } } - PATH_SQB -> { + ParseType.PATH_SQB -> { if(it.children.size != 1) { errMalformedParseTree("Unexpected number of child elements in PATH_SQB ParseNode") } val child = it.children[0] val childMetas = child.getMetas() - if (child.type == PATH_WILDCARD) pathWildcard(childMetas) else pathExpr(child.toAstExpr(), caseSensitive()) + if (child.type == ParseType.PATH_WILDCARD) pathWildcard(childMetas) else pathExpr(child.toAstExpr(), caseSensitive()) } else -> errMalformedParseTree("Unsupported path component: ${it.type}") } } path(rootExpr, pathComponents, metas) } - PARAMETER -> parameter(token!!.value!!.longValue(), metas) - CASE -> { + ParseType.PARAMETER -> parameter(token!!.value!!.longValue(), metas) + ParseType.CASE -> { val branches = ArrayList() val cases = exprPairList(branches) var elseExpr: PartiqlAst.Expr? = null fun ParseNode.addCases() = children.forEach { when(it.type) { - WHEN -> branches.add(exprPair(it.children[0].toAstExpr(), it.children[1].toAstExpr())) - ELSE -> elseExpr = it.children[0].toAstExpr() + ParseType.WHEN -> + branches.add(exprPair(it.children[0].toAstExpr(), it.children[1].toAstExpr())) + ParseType.ELSE -> elseExpr = it.children[0].toAstExpr() else -> errMalformedParseTree("CASE clause must be WHEN or ELSE") } } @@ -400,7 +437,7 @@ class SqlParser(private val ion: IonSystem) : Parser { else -> errMalformedParseTree("CASE must be searched or simple") } } - SELECT_LIST, SELECT_VALUE, PIVOT -> { + ParseType.SELECT_LIST, ParseType.SELECT_VALUE, ParseType.PIVOT -> { // The first child of a SELECT_LIST parse node can be either DISTINCT or ARG_LIST. // If it is ARG_LIST, the children of that node are the select items and the SetQuantifier is ALL // If it is DISTINCT, the SetQuantifier is DISTINCT and there should be one child node, an ARG_LIST @@ -413,11 +450,11 @@ class SqlParser(private val ion: IonSystem) : Parser { // If the query parsed was a `SELECT DISTINCT ...`, children[0] is of type DISTINCT and its // children are the actual select list. 
- val setQuantifier = if (children[0].type == DISTINCT) distinct() else null - val selectList = if (children[0].type == DISTINCT) children[0].children[0] else children[0] + val setQuantifier = if (children[0].type == ParseType.DISTINCT) distinct() else null + val selectList = if (children[0].type == ParseType.DISTINCT) children[0].children[0] else children[0] val fromList = children[1] - if (fromList.type != FROM_CLAUSE) { + if (fromList.type != ParseType.FROM_CLAUSE) { errMalformedParseTree("Invalid second child of SELECT_LIST") } @@ -430,16 +467,16 @@ class SqlParser(private val ion: IonSystem) : Parser { val unconsumedChildren = children.drop(2).toMutableList() val projection = when (type) { - SELECT_LIST -> { + ParseType.SELECT_LIST -> { // We deal with ProjectStar first val childNodes = selectList.children - if (childNodes.any { it.type == PROJECT_ALL && it.children.isEmpty() }) { + if (childNodes.any { it.type == ParseType.PROJECT_ALL && it.children.isEmpty() }) { if (childNodes.size > 1) error("More than one select item when SELECT * was present.") projectStar(childNodes[0].getMetas()) } else { val selectListItems = childNodes.map { when (it.type) { - PROJECT_ALL -> projectAll(it.children[0].toAstExpr()) + ParseType.PROJECT_ALL -> projectAll(it.children[0].toAstExpr()) else -> { val (asAliasSymbol, parseNode) = it.unwrapAsAlias() projectExpr_(parseNode.toAstExpr(), asAliasSymbol) @@ -449,8 +486,8 @@ class SqlParser(private val ion: IonSystem) : Parser { projectList(selectListItems) } } - SELECT_VALUE -> projectValue(selectList.toAstExpr()) - PIVOT -> { + ParseType.SELECT_VALUE -> projectValue(selectList.toAstExpr()) + ParseType.PIVOT -> { val member = children[0] val asExpr = member.children[0].toAstExpr() val atExpr = member.children[1].toAstExpr() @@ -461,20 +498,21 @@ class SqlParser(private val ion: IonSystem) : Parser { val fromSource = fromList.children[0].toFromSource() - val fromLet = unconsumedChildren.firstOrNull { it.type == LET }?.let { + val fromLet = unconsumedChildren.firstOrNull { it.type == ParseType.LET }?.let { unconsumedChildren.remove(it) it.toLetSource() } - val whereExpr = unconsumedChildren.firstOrNull { it.type == WHERE }?.let { + val whereExpr = unconsumedChildren.firstOrNull { it.type == ParseType.WHERE }?.let { unconsumedChildren.remove(it) it.children[0].toAstExpr() } - val groupBy = unconsumedChildren.firstOrNull { it.type == GROUP || it.type == GROUP_PARTIAL }?.let { + val groupBy = unconsumedChildren.firstOrNull { it.type == ParseType.GROUP || + it.type == ParseType.GROUP_PARTIAL }?.let { unconsumedChildren.remove(it) val groupingStrategy = when (it.type) { - GROUP -> groupFull() + ParseType.GROUP -> groupFull() else -> groupPartial() } val groupAsName = if (it.children.size > 1) it.children[1].toSymbolicName() else null @@ -490,12 +528,12 @@ class SqlParser(private val ion: IonSystem) : Parser { ) } - val havingExpr = unconsumedChildren.firstOrNull { it.type == HAVING }?.let { + val havingExpr = unconsumedChildren.firstOrNull { it.type == ParseType.HAVING }?.let { unconsumedChildren.remove(it) it.children[0].toAstExpr() } - val orderBy = unconsumedChildren.firstOrNull { it.type == ORDER_BY }?.let { + val orderBy = unconsumedChildren.firstOrNull { it.type == ParseType.ORDER_BY }?.let { unconsumedChildren.remove(it) orderBy( it.children[0].children.map { @@ -508,12 +546,12 @@ class SqlParser(private val ion: IonSystem) : Parser { ) } - val limitExpr = unconsumedChildren.firstOrNull { it.type == LIMIT }?.let { + val limitExpr = 
unconsumedChildren.firstOrNull { it.type == ParseType.LIMIT }?.let { unconsumedChildren.remove(it) it.children[0].toAstExpr() } - val offsetExpr = unconsumedChildren.firstOrNull { it.type == OFFSET }?.let { + val offsetExpr = unconsumedChildren.firstOrNull { it.type == ParseType.OFFSET }?.let { unconsumedChildren.remove(it) it.children[0].toAstExpr() } @@ -534,34 +572,37 @@ class SqlParser(private val ion: IonSystem) : Parser { metas = metas ) } - DATE -> { + ParseType.DATE -> { val dateString = token!!.text!! val (year, month, day) = dateString.split("-") date(year.toLong(), month.toLong(), day.toLong(), metas) } - TIME -> { + ParseType.TIME -> { val timeString = token!!.text!! val precision = children[0].token!!.value!!.numberValue().toLong() val time = LocalTime.parse(timeString, ISO_TIME) litTime( - timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), time.nano.toLong(), precision, false, null), + timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), time.nano.toLong(), + precision, false, null), metas ) } - TIME_WITH_TIME_ZONE -> { + ParseType.TIME_WITH_TIME_ZONE -> { val timeString = token!!.text!! val precision = children[0].token!!.value!!.longValue() try { val time = OffsetTime.parse(timeString) litTime( - timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), time.nano.toLong(), precision, true, (time.offset.totalSeconds/60).toLong()), + timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), + time.nano.toLong(), precision, true, (time.offset.totalSeconds/60).toLong()), metas ) } catch (e: DateTimeParseException) { // In case time zone not explicitly specified val time = LocalTime.parse(timeString) litTime( - timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), time.nano.toLong(), precision, true, null), + timeValue(time.hour.toLong(), time.minute.toLong(), time.second.toLong(), + time.nano.toLong(), precision, true, null), metas ) } @@ -576,12 +617,12 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (type) { - FROM -> { + ParseType.FROM -> { // The first child is the operation, the second child is the from list, // each child following is an optional clause (e.g. ORDER BY) val operation = children[0].toAstDml() val fromSource = children[1].also { - if (it.type != FROM_CLAUSE) { + if (it.type != ParseType.FROM_CLAUSE) { errMalformedParseTree("Invalid second child of FROM") } @@ -594,11 +635,11 @@ class SqlParser(private val ion: IonSystem) : Parser { // If any unconsumed children remain, we've missed something and should throw an exception. 
val unconsumedChildren = children.drop(2).toMutableList() - val where = unconsumedChildren.firstOrNull { it.type == WHERE }?.let { + val where = unconsumedChildren.firstOrNull { it.type == ParseType.WHERE }?.let { unconsumedChildren.remove(it) it.children[0].toAstExpr() } - val returning = unconsumedChildren.firstOrNull { it.type == RETURNING }?.let { + val returning = unconsumedChildren.firstOrNull { it.type == ParseType.RETURNING }?.let { unconsumedChildren.remove(it) it.toReturningExpr() } @@ -608,7 +649,7 @@ class SqlParser(private val ion: IonSystem) : Parser { operation.copy(from = fromSource, where = where, returning = returning, metas = metas) } - INSERT, INSERT_VALUE -> { + ParseType.INSERT, ParseType.INSERT_VALUE -> { val insertReturning = toInsertReturning() dml( dmlOpList(insertReturning.ops), @@ -616,11 +657,11 @@ class SqlParser(private val ion: IonSystem) : Parser { metas = metas ) } - SET, UPDATE, REMOVE, DELETE -> dml( + ParseType.SET, ParseType.UPDATE, ParseType.REMOVE, ParseType.DELETE -> dml( dmlOpList(toDmlOperation()), metas = metas ) - DML_LIST -> { + ParseType.DML_LIST -> { val dmlOps = children.flatMap { it.toDmlOperation() } dml( dmlOpList(dmlOps), @@ -637,22 +678,22 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (type) { - CREATE_TABLE -> ddl( + ParseType.CREATE_TABLE -> ddl( createTable(children[0].token!!.text!!), metas ) - DROP_TABLE -> ddl( + ParseType.DROP_TABLE -> ddl( dropTable(children[0].toIdentifier()), metas ) - CREATE_INDEX -> ddl( + ParseType.CREATE_INDEX -> ddl( createIndex( children[0].toIdentifier(), children[1].children.map { it.toAstExpr() } ), metas ) - DROP_INDEX -> ddl( + ParseType.DROP_INDEX -> ddl( dropIndex(children[1].toIdentifier(), children[0].toIdentifier()), metas ) @@ -666,7 +707,7 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (type) { - EXEC -> exec_( + ParseType.EXEC -> exec_( SymbolPrimitive(token?.text!!.toLowerCase(), emptyMetaContainer()), children.map { it.toAstExpr() }, metas @@ -677,7 +718,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } private fun ParseNode.toAstType(): PartiqlAst.Type { - if (type != TYPE) { + if (type != ParseType.TYPE) { errMalformedParseTree("Expected ParseType.TYPE instead of $type") } @@ -764,7 +805,7 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (type) { - FROM_SOURCE_JOIN -> { + ParseType.FROM_SOURCE_JOIN -> { val isCrossJoin = token?.keywordText?.contains("cross") ?: false if (!isCrossJoin && children.size != 3) { errMalformedParseTree("Incorrect number of clauses provided to JOIN") @@ -794,7 +835,7 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (unwrappedParseNode.type) { - UNPIVOT -> unpivot_( + ParseType.UNPIVOT -> unpivot_( unwrappedParseNode.children[0].toAstExpr(), aliases.asName, aliases.atName, @@ -815,15 +856,15 @@ class SqlParser(private val ion: IonSystem) : Parser { val metas = getMetas() return when (type) { - AS_ALIAS -> { + ParseType.AS_ALIAS -> { if(variables.asName != null) error("Invalid parse tree: AS_ALIAS encountered more than once in FROM source") children[0].unwrapAliases(variables.copy(asName = SymbolPrimitive(token!!.text!!, metas))) } - AT_ALIAS -> { + ParseType.AT_ALIAS -> { if(variables.atName != null) error("Invalid parse tree: AT_ALIAS encountered more than once in FROM source") children[0].unwrapAliases(variables.copy(atName = SymbolPrimitive(token!!.text!!, metas))) } - BY_ALIAS -> { + 
ParseType.BY_ALIAS -> { if(variables.byName != null) error("Invalid parse tree: BY_ALIAS encountered more than once in FROM source") children[0].unwrapAliases(variables.copy(byName = SymbolPrimitive(token!!.text!!, metas))) } @@ -844,7 +885,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } private fun ParseNode.toReturningMapping(): PartiqlAst.ReturningMapping { - if(type != RETURNING_MAPPING) { + if(type != ParseType.RETURNING_MAPPING) { errMalformedParseTree("Expected ParseType.RETURNING_MAPPING instead of $type") } return PartiqlAst.build { @@ -860,12 +901,12 @@ class SqlParser(private val ion: IonSystem) : Parser { private fun ParseNode.toInsertReturning(): InsertReturning = when (type) { - INSERT -> { + ParseType.INSERT -> { val ops = listOf(PartiqlAst.DmlOp.Insert(children[0].toAstExpr(), children[1].toAstExpr())) // We will remove items from this collection as we consume them. // If any unconsumed children remain, we've missed something and should throw an exception. val unconsumedChildren = children.drop(2).toMutableList() - val returning = unconsumedChildren.firstOrNull { it.type == RETURNING }?.let { + val returning = unconsumedChildren.firstOrNull { it.type == ParseType.RETURNING }?.let { unconsumedChildren.remove(it) it.toReturningExpr() } @@ -875,12 +916,12 @@ class SqlParser(private val ion: IonSystem) : Parser { InsertReturning(ops, returning) } - INSERT_VALUE -> { + ParseType.INSERT_VALUE -> { fun getOnConflict(onConflictChildren: List): PartiqlAst.OnConflict { onConflictChildren.getOrNull(0)?.let { firstNode -> val condition = firstNode.toAstExpr() onConflictChildren.getOrNull(1)?.let { secondNode -> - if (CONFLICT_ACTION == secondNode.type && "do_nothing" == secondNode.token?.keywordText) { + if (ParseType.CONFLICT_ACTION == secondNode.type && "do_nothing" == secondNode.token?.keywordText) { return PartiqlAst.build { onConflict(condition, doNothing()) } } } @@ -896,18 +937,19 @@ class SqlParser(private val ion: IonSystem) : Parser { val unconsumedChildren = children.drop(2).toMutableList() // Handle AT clause - val position = unconsumedChildren.firstOrNull { it.type != ON_CONFLICT && it.type != RETURNING }?.let { + val position = unconsumedChildren.firstOrNull { it.type != ParseType.ON_CONFLICT && + it.type != ParseType.RETURNING }?.let { unconsumedChildren.remove(it) it.toAstExpr() } - val onConflict = unconsumedChildren.firstOrNull { it.type == ON_CONFLICT }?.let { + val onConflict = unconsumedChildren.firstOrNull { it.type == ParseType.ON_CONFLICT }?.let { unconsumedChildren.remove(it) getOnConflict(it.children) } val ops = listOf(PartiqlAst.build { insertValue(lvalue, value, position, onConflict) }) - val returning = unconsumedChildren.firstOrNull { it.type == RETURNING }?.let { + val returning = unconsumedChildren.firstOrNull { it.type == ParseType.RETURNING }?.let { unconsumedChildren.remove(it) it.toReturningExpr() } @@ -917,23 +959,23 @@ class SqlParser(private val ion: IonSystem) : Parser { InsertReturning(ops, returning) } - else -> unsupported("Unsupported syntax for $type", PARSE_UNSUPPORTED_SYNTAX) + else -> unsupported("Unsupported syntax for $type", ErrorCode.PARSE_UNSUPPORTED_SYNTAX) } private fun ParseNode.toColumnComponent(metas: IonElementMetaContainer): PartiqlAst.ColumnComponent = PartiqlAst.build { when (type) { - RETURNING_WILDCARD -> returningWildcard(metas) + ParseType.RETURNING_WILDCARD -> returningWildcard(metas) else -> returningColumn(this@toColumnComponent.toAstExpr()) } } private fun ParseNode.toDmlOperation(): List = when (type) 
{ - INSERT -> { + ParseType.INSERT -> { listOf(PartiqlAst.build { insert(children[0].toAstExpr(), children[1].toAstExpr()) }) } - INSERT_VALUE -> { + ParseType.INSERT_VALUE -> { val lvalue = children[0].toAstExpr() val value = children[1].toAstExpr() @@ -942,18 +984,19 @@ class SqlParser(private val ion: IonSystem) : Parser { val unconsumedChildren = children.drop(2).toMutableList() // Handle AT clause - val position = unconsumedChildren.firstOrNull { it.type != ON_CONFLICT && it.type != RETURNING }?.let { + val position = unconsumedChildren.firstOrNull { it.type != ParseType.ON_CONFLICT && + it.type != ParseType.RETURNING }?.let { unconsumedChildren.remove(it) it.toAstExpr() } - val onConflict = unconsumedChildren.firstOrNull { it.type == ON_CONFLICT }?.let { + val onConflict = unconsumedChildren.firstOrNull { it.type == ParseType.ON_CONFLICT }?.let { unconsumedChildren.remove(it) val onConflictChildren = it.children onConflictChildren.getOrNull(0)?.let { val condition = it.toAstExpr() onConflictChildren.getOrNull(1)?.let { - if (CONFLICT_ACTION == it.type && "do_nothing" == it.token?.keywordText) { + if (ParseType.CONFLICT_ACTION == it.type && "do_nothing" == it.token?.keywordText) { PartiqlAst.build { onConflict(condition, doNothing()) } } } @@ -966,7 +1009,7 @@ class SqlParser(private val ion: IonSystem) : Parser { listOf(PartiqlAst.build { insertValue(lvalue, value, position, onConflict) }) } - SET, UPDATE -> children.map { + ParseType.SET, ParseType.UPDATE -> children.map { PartiqlAst.build { set( assignment( @@ -976,20 +1019,20 @@ class SqlParser(private val ion: IonSystem) : Parser { ) } } - REMOVE -> listOf(PartiqlAst.build { remove(children[0].toAstExpr()) }) - DELETE -> listOf(PartiqlAst.build { delete() }) - else -> unsupported("Unsupported syntax for $type", PARSE_UNSUPPORTED_SYNTAX) + ParseType.REMOVE -> listOf(PartiqlAst.build { remove(children[0].toAstExpr()) }) + ParseType.DELETE -> listOf(PartiqlAst.build { delete() }) + else -> unsupported("Unsupported syntax for $type", ErrorCode.PARSE_UNSUPPORTED_SYNTAX) } private fun ParseNode.unwrapAsAlias(): AsAlias = - if (type == AS_ALIAS) { + if (type == ParseType.AS_ALIAS) { AsAlias(SymbolPrimitive(token!!.text!!, getMetas()), children[0]) } else { AsAlias(null, this) } private fun ParseNode.toIdentifier(): PartiqlAst.Identifier { - if (type != ATOM){ + if (type != ParseType.ATOM){ errMalformedParseTree("Cannot transform ParseNode type: $type to identifier") } @@ -997,21 +1040,21 @@ class SqlParser(private val ion: IonSystem) : Parser { return PartiqlAst.build { when (token?.type){ - QUOTED_IDENTIFIER -> identifier(token.text!!, caseSensitive(), metas) - IDENTIFIER -> identifier(token.text!!, caseInsensitive(), metas) + TokenType.QUOTED_IDENTIFIER -> identifier(token.text!!, caseSensitive(), metas) + TokenType.IDENTIFIER -> identifier(token.text!!, caseInsensitive(), metas) else -> errMalformedParseTree("Cannot transform atom token type ${token?.type} to identifier") } } } private fun ParseNode.toOrderingSpec(): PartiqlAst.OrderingSpec { - if (type != ORDERING_SPEC) { + if (type != ParseType.ORDERING_SPEC) { errMalformedParseTree("Expected ParseType.ORDERING_SPEC instead of $type") } return PartiqlAst.build { when (token?.type) { - ASC -> asc() - DESC -> desc() + TokenType.ASC -> asc() + TokenType.DESC -> desc() else -> errMalformedParseTree("Invalid ordering spec parsing") } } @@ -1022,7 +1065,7 @@ class SqlParser(private val ion: IonSystem) : Parser { errMalformedParseTree("Expected ParseNode to have a token") } when 
(token.type) { - LITERAL, ION_LITERAL, IDENTIFIER, QUOTED_IDENTIFIER -> { + TokenType.LITERAL, TokenType.ION_LITERAL, TokenType.IDENTIFIER, TokenType.QUOTED_IDENTIFIER -> { val tokenText = token.text ?: errMalformedParseTree("Expected ParseNode.token to have text") return SymbolPrimitive(tokenText, getMetas()) } @@ -1106,7 +1149,7 @@ class SqlParser(private val ion: IonSystem) : Parser { fun parseRightExpr() = if (rem.size < 3) { rem.err( "Missing right-hand side expression of infix operator", - PARSE_EXPECTED_EXPRESSION + ErrorCode.PARSE_EXPECTED_EXPRESSION ) } else { rem.tail.parseExpression( @@ -1119,12 +1162,12 @@ class SqlParser(private val ion: IonSystem) : Parser { "is", "is_not" -> rem.tail.parseType() // IN has context sensitive parsing rules around parenthesis "in", "not_in" -> when { - rem.tail.head?.type == LEFT_PAREN + rem.tail.head?.type == TokenType.LEFT_PAREN && rem.tail.tail.head?.keywordText !in IN_OP_NORMAL_EVAL_KEYWORDS -> rem.tail.tail.parseArgList( - aliasSupportType = NONE, - mode = NORMAL_ARG_LIST - ).deriveExpected(RIGHT_PAREN).copy(LIST) + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.NORMAL_ARG_LIST + ).deriveExpected(TokenType.RIGHT_PAREN).copy(ParseType.LIST) else -> parseRightExpr() } else -> parseRightExpr() @@ -1132,19 +1175,19 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = right.remaining expr = when { - op.isBinaryOperator -> ParseNode(BINARY, op, listOf(expr, right), rem) + op.isBinaryOperator -> ParseNode(ParseType.BINARY, op, listOf(expr, right), rem) else -> when (op.keywordText) { "between", "not_between" -> { val rest = rem.tailExpectedKeyword("and") if (rest.onlyEndOfStatement()) { - rem.head.err("Expected expression after AND", PARSE_EXPECTED_EXPRESSION) + rem.head.err("Expected expression after AND", ErrorCode.PARSE_EXPECTED_EXPRESSION) } else { rem = rest val third = rem.parseExpression( precedence = op.infixPrecedence ) rem = third.remaining - ParseNode(TERNARY, op, listOf(expr, right, third), rem) + ParseNode(ParseType.TERNARY, op, listOf(expr, right, third), rem) } } "like", "not_like" -> { @@ -1152,18 +1195,18 @@ class SqlParser(private val ion: IonSystem) : Parser { rem.head?.keywordText == "escape" -> { val rest = rem.tailExpectedKeyword("escape") if (rest.onlyEndOfStatement()) { - rem.head.err("Expected expression after ESCAPE", PARSE_EXPECTED_EXPRESSION) + rem.head.err("Expected expression after ESCAPE", ErrorCode.PARSE_EXPECTED_EXPRESSION) } else { rem = rest val third = rem.parseExpression(precedence = op.infixPrecedence) rem = third.remaining - ParseNode(TERNARY, op, listOf(expr, right, third), rem) + ParseNode(ParseType.TERNARY, op, listOf(expr, right, third), rem) } } - else -> ParseNode(BINARY, op, listOf(expr, right), rem) + else -> ParseNode(ParseType.BINARY, op, listOf(expr, right), rem) } } - else -> rem.err("Unknown infix operator", PARSE_UNKNOWN_OPERATOR) + else -> rem.err("Unknown infix operator", ErrorCode.PARSE_UNKNOWN_OPERATOR) } } } @@ -1175,7 +1218,7 @@ class SqlParser(private val ion: IonSystem) : Parser { true -> { val op = head!! 
fun makeUnaryParseNode(term: ParseNode) = - ParseNode(UNARY, op, listOf(term), term.remaining) + ParseNode(ParseType.UNARY, op, listOf(term), term.remaining) // constant fold unary plus/minus into constant literals when (op.keywordText) { @@ -1208,8 +1251,8 @@ class SqlParser(private val ion: IonSystem) : Parser { val term = when (pathMode) { PathMode.FULL_PATH -> parseTerm() PathMode.SIMPLE_PATH -> when (head?.type) { - QUOTED_IDENTIFIER, IDENTIFIER -> atomFromHead() - else -> err("Expected identifier for simple path", PARSE_INVALID_PATH_COMPONENT) + TokenType.QUOTED_IDENTIFIER, TokenType.IDENTIFIER -> atomFromHead() + else -> err("Expected identifier for simple path", ErrorCode.PARSE_INVALID_PATH_COMPONENT) } } val path = ArrayList(listOf(term)) @@ -1218,44 +1261,44 @@ class SqlParser(private val ion: IonSystem) : Parser { var hasPath = true while (hasPath) { when (rem.head?.type) { - DOT -> { + TokenType.DOT -> { val dotToken = rem.head!! // consume first dot rem = rem.tail val pathPart = when (rem.head?.type) { - IDENTIFIER -> { - val litToken = Token(LITERAL, ion.newString(rem.head?.text!!), rem.head!!.span) - ParseNode(CASE_INSENSITIVE_ATOM, litToken, emptyList(), rem.tail) + TokenType.IDENTIFIER -> { + val litToken = Token(TokenType.LITERAL, ion.newString(rem.head?.text!!), rem.head!!.span) + ParseNode(ParseType.CASE_INSENSITIVE_ATOM, litToken, emptyList(), rem.tail) } - QUOTED_IDENTIFIER -> { - val litToken = Token(LITERAL, ion.newString(rem.head?.text!!), rem.head!!.span) - ParseNode(CASE_SENSITIVE_ATOM, litToken, emptyList(), rem.tail) + TokenType.QUOTED_IDENTIFIER -> { + val litToken = Token(TokenType.LITERAL, ion.newString(rem.head?.text!!), rem.head!!.span) + ParseNode(ParseType.CASE_SENSITIVE_ATOM, litToken, emptyList(), rem.tail) } - STAR -> { + TokenType.STAR -> { if (pathMode != PathMode.FULL_PATH) { - rem.err("Invalid path dot component for simple path", PARSE_INVALID_PATH_COMPONENT) + rem.err("Invalid path dot component for simple path", ErrorCode.PARSE_INVALID_PATH_COMPONENT) } - ParseNode(PATH_UNPIVOT, rem.head, emptyList(), rem.tail) + ParseNode(ParseType.PATH_UNPIVOT, rem.head, emptyList(), rem.tail) } else -> { - rem.err("Invalid path dot component", PARSE_INVALID_PATH_COMPONENT) + rem.err("Invalid path dot component", ErrorCode.PARSE_INVALID_PATH_COMPONENT) } } - path.add(ParseNode(PATH_DOT, dotToken, listOf(pathPart), rem)) + path.add(ParseNode(ParseType.PATH_DOT, dotToken, listOf(pathPart), rem)) rem = rem.tail } - LEFT_BRACKET -> { + TokenType.LEFT_BRACKET -> { val leftBracketToken = rem.head!! 
rem = rem.tail val expr = when (rem.head?.type) { - STAR -> ParseNode(PATH_WILDCARD, rem.head, emptyList(), rem.tail) + TokenType.STAR -> ParseNode(ParseType.PATH_WILDCARD, rem.head, emptyList(), rem.tail) else -> rem.parseExpression() - }.deriveExpected(RIGHT_BRACKET) - if (pathMode == PathMode.SIMPLE_PATH && expr.type != ATOM && expr.token?.type != LITERAL) { - rem.err("Invalid path component for simple path", PARSE_INVALID_PATH_COMPONENT) + }.deriveExpected(TokenType.RIGHT_BRACKET) + if (pathMode == PathMode.SIMPLE_PATH && expr.type != ParseType.ATOM && expr.token?.type != TokenType.LITERAL) { + rem.err("Invalid path component for simple path", ErrorCode.PARSE_INVALID_PATH_COMPONENT) } - path.add(ParseNode(PATH_SQB, leftBracketToken, listOf(expr), rem.tail)) + path.add(ParseNode(ParseType.PATH_SQB, leftBracketToken, listOf(expr), rem.tail)) rem = expr.remaining } else -> hasPath = false @@ -1264,26 +1307,26 @@ class SqlParser(private val ion: IonSystem) : Parser { return when (path.size) { 1 -> term - else -> ParseNode(PATH, null, path, rem) + else -> ParseNode(ParseType.PATH, null, path, rem) } } private fun List.parseTerm(): ParseNode = when (head?.type) { - OPERATOR -> when (head?.keywordText) { + TokenType.OPERATOR -> when (head?.keywordText) { // the lexical scope operator is **only** allowed with identifiers "@" -> when (tail.head?.type) { - IDENTIFIER, QUOTED_IDENTIFIER -> ParseNode( - UNARY, - head, - listOf(tail.atomFromHead()), - tail.tail + TokenType.IDENTIFIER, TokenType.QUOTED_IDENTIFIER -> ParseNode( + ParseType.UNARY, + head, + listOf(tail.atomFromHead()), + tail.tail ) - else -> err("Identifier must follow @-operator", PARSE_MISSING_IDENT_AFTER_AT) + else -> err("Identifier must follow @-operator", ErrorCode.PARSE_MISSING_IDENT_AFTER_AT) } - else -> err("Unexpected operator", PARSE_UNEXPECTED_OPERATOR) + else -> err("Unexpected operator", ErrorCode.PARSE_UNEXPECTED_OPERATOR) } - KEYWORD -> when (head?.keywordText) { + TokenType.KEYWORD -> when (head?.keywordText) { in BASE_DML_KEYWORDS -> parseBaseDml() "update" -> tail.parseUpdate() "delete" -> tail.parseDelete(head!!) @@ -1299,7 +1342,7 @@ class SqlParser(private val ion: IonSystem) : Parser { "from" -> tail.parseFrom() // table value constructor--which aliases to bag constructor in PartiQL with very // specific syntax - "values" -> tail.parseTableValues().copy(type = BAG) + "values" -> tail.parseTableValues().copy(type = ParseType.BAG) "substring" -> tail.parseSubstring(head!!) "trim" -> tail.parseTrim(head!!) "extract" -> tail.parseExtract(head!!) @@ -1307,45 +1350,46 @@ class SqlParser(private val ion: IonSystem) : Parser { "date" -> tail.parseDate() "time" -> tail.parseTime() in FUNCTION_NAME_KEYWORDS -> when (tail.head?.type) { - LEFT_PAREN -> + TokenType.LEFT_PAREN -> tail.tail.parseFunctionCall(head!!) 
- else -> err("Unexpected keyword", PARSE_UNEXPECTED_KEYWORD) + else -> err("Unexpected keyword", ErrorCode.PARSE_UNEXPECTED_KEYWORD) } "exec" -> tail.parseExec() - else -> err("Unexpected keyword", PARSE_UNEXPECTED_KEYWORD) + else -> err("Unexpected keyword", ErrorCode.PARSE_UNEXPECTED_KEYWORD) } - LEFT_PAREN -> { + TokenType.LEFT_PAREN -> { val group = tail.parseArgList( - aliasSupportType = NONE, - mode = NORMAL_ARG_LIST - ).deriveExpected(RIGHT_PAREN) + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.NORMAL_ARG_LIST + ).deriveExpected(TokenType.RIGHT_PAREN) when (group.children.size) { - 0 -> tail.err("Expression group cannot be empty", PARSE_EXPECTED_EXPRESSION) + 0 -> tail.err("Expression group cannot be empty", ErrorCode.PARSE_EXPECTED_EXPRESSION) // expression grouping 1 -> group.children[0].copy(remaining = group.remaining) // row value constructor--which aliases to list constructor in PartiQL - else -> group.copy(type = LIST) + else -> group.copy(type = ParseType.LIST) } } - LEFT_BRACKET -> when (tail.head?.type) { - RIGHT_BRACKET -> ParseNode(LIST, null, emptyList(), tail.tail) + TokenType.LEFT_BRACKET -> when (tail.head?.type) { + TokenType.RIGHT_BRACKET -> ParseNode(ParseType.LIST, null, emptyList(), tail.tail) else -> tail.parseListLiteral() } - LEFT_DOUBLE_ANGLE_BRACKET -> when (tail.head?.type) { - RIGHT_DOUBLE_ANGLE_BRACKET -> ParseNode(BAG, null, emptyList(), tail.tail) + TokenType.LEFT_DOUBLE_ANGLE_BRACKET -> when (tail.head?.type) { + TokenType.RIGHT_DOUBLE_ANGLE_BRACKET -> ParseNode(ParseType.BAG, null, emptyList(), tail.tail) else -> tail.parseBagLiteral() } - LEFT_CURLY -> when (tail.head?.type) { - RIGHT_CURLY -> ParseNode(STRUCT, null, emptyList(), tail.tail) + TokenType.LEFT_CURLY -> when (tail.head?.type) { + TokenType.RIGHT_CURLY -> ParseNode(ParseType.STRUCT, null, emptyList(), tail.tail) else -> tail.parseStructLiteral() } - IDENTIFIER, QUOTED_IDENTIFIER -> when (tail.head?.type) { - LEFT_PAREN -> tail.tail.parseFunctionCall(head!!) + TokenType.IDENTIFIER, TokenType.QUOTED_IDENTIFIER -> when (tail.head?.type) { + TokenType.LEFT_PAREN -> tail.tail.parseFunctionCall(head!!) else -> atomFromHead() } - QUESTION_MARK -> ParseNode(PARAMETER, head!!, listOf(), tail) - ION_LITERAL, LITERAL, NULL, MISSING, TRIM_SPECIFICATION -> atomFromHead() - else -> err("Unexpected term", PARSE_UNEXPECTED_TERM) + TokenType.QUESTION_MARK -> ParseNode(ParseType.PARAMETER, head!!, listOf(), tail) + TokenType.ION_LITERAL, TokenType.LITERAL, TokenType.NULL, TokenType.MISSING, + TokenType.TRIM_SPECIFICATION -> atomFromHead() + else -> err("Unexpected term", ErrorCode.PARSE_UNEXPECTED_TERM) }.let { parseNode -> // for many of the terms here we parse the tail, assuming the head as // context, but that loses the metas and other info from that token. 
@@ -1372,7 +1416,7 @@ class SqlParser(private val ion: IonSystem) : Parser { children.add(caseBody) rem = caseBody.remaining - return ParseNode(CASE, null, children, rem) + return ParseNode(ParseType.CASE, null, children, rem) } private fun List.parseCaseBody(): ParseNode { @@ -1385,49 +1429,49 @@ class SqlParser(private val ion: IonSystem) : Parser { val result = rem.parseExpression() rem = result.remaining - children.add(ParseNode(WHEN, null, listOf(conditionExpr, result), rem)) + children.add(ParseNode(ParseType.WHEN, null, listOf(conditionExpr, result), rem)) } if (children.isEmpty()) { - err("Expected a WHEN clause in CASE", PARSE_EXPECTED_WHEN_CLAUSE) + err("Expected a WHEN clause in CASE", ErrorCode.PARSE_EXPECTED_WHEN_CLAUSE) } if (rem.head?.keywordText == "else") { val elseExpr = rem.tail.parseExpression() rem = elseExpr.remaining - children.add(ParseNode(ELSE, null, listOf(elseExpr), rem)) + children.add(ParseNode(ParseType.ELSE, null, listOf(elseExpr), rem)) } - return ParseNode(ARG_LIST, null, children, rem) + return ParseNode(ParseType.ARG_LIST, null, children, rem) .deriveExpectedKeyword("end") } private fun List.parseCast(): ParseNode { - if (head?.type != LEFT_PAREN) { - err("Missing left parenthesis after CAST", PARSE_EXPECTED_LEFT_PAREN_AFTER_CAST) + if (head?.type != TokenType.LEFT_PAREN) { + err("Missing left parenthesis after CAST", ErrorCode.PARSE_EXPECTED_LEFT_PAREN_AFTER_CAST) } - val valueExpr = tail.parseExpression().deriveExpected(AS) + val valueExpr = tail.parseExpression().deriveExpected(TokenType.AS) var rem = valueExpr.remaining - val typeNode = rem.parseType().deriveExpected(RIGHT_PAREN) + val typeNode = rem.parseType().deriveExpected(TokenType.RIGHT_PAREN) rem = typeNode.remaining - return ParseNode(CAST, head, listOf(valueExpr, typeNode), rem) + return ParseNode(ParseType.CAST, head, listOf(valueExpr, typeNode), rem) } private fun List.parseType(): ParseNode { val typeName = head?.keywordText - val typeArity = TYPE_NAME_ARITY_MAP[typeName] ?: err("Expected type name", PARSE_EXPECTED_TYPE_NAME) + val typeArity = TYPE_NAME_ARITY_MAP[typeName] ?: err("Expected type name", ErrorCode.PARSE_EXPECTED_TYPE_NAME) val typeNode = when (tail.head?.type) { - LEFT_PAREN -> tail.tail.parseArgList( - aliasSupportType = NONE, - mode = NORMAL_ARG_LIST + TokenType.LEFT_PAREN -> tail.tail.parseArgList( + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.NORMAL_ARG_LIST ).copy( - type = TYPE, + type = ParseType.TYPE, token = head - ).deriveExpected(RIGHT_PAREN) + ).deriveExpected(TokenType.RIGHT_PAREN) - else -> ParseNode(TYPE, head, emptyList(), tail) + else -> ParseNode(ParseType.TYPE, head, emptyList(), tail) } // Check for the optional "WITH TIME ZONE" specifier for TIME and validate the value of precision. // Note that this needs to be checked explicitly as the keywordtext for "TIME WITH TIME ZONE" consists of multiple words. 
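The CAST arity check in the parseType hunk above is easier to read with a small worked example. This is a sketch under assumed arities, not the real contents of TYPE_NAME_ARITY_MAP:

    // Sketch only: the ranges below are assumptions for illustration; the parser consults TYPE_NAME_ARITY_MAP.
    val assumedArity = mapOf("varchar" to 0..1, "decimal" to 0..2, "boolean" to 0..0)

    // A CAST target is valid only if its argument count falls inside the type's allowed range;
    // otherwise the parser raises ErrorCode.PARSE_CAST_ARITY.
    fun castArityOk(typeName: String, argCount: Int): Boolean =
        argCount in (assumedArity[typeName] ?: return false)

    // castArityOk("varchar", 1) == true   e.g. CAST(x AS VARCHAR(10))
    // castArityOk("varchar", 2) == false  -> PARSE_CAST_ARITY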
@@ -1440,7 +1484,7 @@ class SqlParser(private val ion: IonSystem) : Parser { ) { precision.token.err( "Expected integer value between 0 and 9 for precision", - PARSE_INVALID_PRECISION_FOR_TIME + ErrorCode.PARSE_INVALID_PRECISION_FOR_TIME ) } } @@ -1459,16 +1503,16 @@ class SqlParser(private val ion: IonSystem) : Parser { if (typeNode.children.size !in typeArity) { val pvmap = PropertyValueMap() - pvmap[CAST_TO] = typeName?: "" - pvmap[EXPECTED_ARITY_MIN] = typeArity.first - pvmap[EXPECTED_ARITY_MAX] = typeArity.last - tail.err("CAST for $typeName must have arity of $typeArity", PARSE_CAST_ARITY, pvmap) + pvmap[Property.CAST_TO] = typeName?: "" + pvmap[Property.EXPECTED_ARITY_MIN] = typeArity.first + pvmap[Property.EXPECTED_ARITY_MAX] = typeArity.last + tail.err("CAST for $typeName must have arity of $typeArity", ErrorCode.PARSE_CAST_ARITY, pvmap) } for (child in typeNode.children) { - if (child.type != ATOM - || child.token?.type != LITERAL + if (child.type != ParseType.ATOM + || child.token?.type != TokenType.LITERAL || child.token.value?.isUnsignedInteger != true) { - err("Type parameter must be an unsigned integer literal", PARSE_INVALID_TYPE_PARAM) + err("Type parameter must be an unsigned integer literal", ErrorCode.PARSE_INVALID_TYPE_PARAM) } } @@ -1499,7 +1543,7 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = it.remaining } - return ParseNode(FROM, null, listOf(operation, fromList) + children, rem) + return ParseNode(ParseType.FROM, null, listOf(operation, fromList) + children, rem) } private fun List.parseBaseDmls() : ParseNode { @@ -1512,14 +1556,14 @@ class SqlParser(private val ion: IonSystem) : Parser { } if (nodes.size == 0) { - err("Expected data manipulation", PARSE_MISSING_OPERATION) + err("Expected data manipulation", ErrorCode.PARSE_MISSING_OPERATION) } if (nodes.size == 1) { return nodes[0] } - return ParseNode(DML_LIST, null, nodes, rem) + return ParseNode(ParseType.DML_LIST, null, nodes, rem) } private fun List.parseBaseDml(): ParseNode { @@ -1541,25 +1585,25 @@ class SqlParser(private val ion: IonSystem) : Parser { val returning = rem.parseOptionalReturning()?.also { rem = it.remaining } - ParseNode(INSERT_VALUE, null, listOfNotNull(lvalue, value, position, onConflict, returning), rem) + ParseNode(ParseType.INSERT_VALUE, null, listOfNotNull(lvalue, value, position, onConflict, returning), rem) } else { val values = rem.parseExpression() - ParseNode(INSERT, null, listOf(lvalue, values), values.remaining) + ParseNode(ParseType.INSERT, null, listOf(lvalue, values), values.remaining) } } - "set" -> rem.tail.parseSetAssignments(UPDATE) + "set" -> rem.tail.parseSetAssignments(ParseType.UPDATE) "remove" -> { val lvalue = rem.tail.parsePathTerm(PathMode.SIMPLE_PATH) rem = lvalue.remaining - ParseNode(REMOVE, null, listOf(lvalue), rem) + ParseNode(ParseType.REMOVE, null, listOf(lvalue), rem) } - else -> err("Expected data manipulation", PARSE_MISSING_OPERATION) + else -> err("Expected data manipulation", ErrorCode.PARSE_MISSING_OPERATION) } } private fun List.parseConflictAction(token: Token): ParseNode { val rem = this - return ParseNode(CONFLICT_ACTION, token, emptyList(), rem.tail) + return ParseNode(ParseType.CONFLICT_ACTION, token, emptyList(), rem.tail) } // Parse the optional ON CONFLICT clause in 'INSERT VALUE AT ON CONFLICT WHERE ' @@ -1576,32 +1620,32 @@ class SqlParser(private val ion: IonSystem) : Parser { "do_nothing" -> { val conflictAction = onConflictRem.parseConflictAction(onConflictRem.head!!) 
var nodes = listOfNotNull(onConflictExpression, conflictAction) - ParseNode(ON_CONFLICT, null, nodes, conflictAction.remaining) + ParseNode(ParseType.ON_CONFLICT, null, nodes, conflictAction.remaining) } - else -> rem.head.err("invalid ON CONFLICT syntax", PARSE_EXPECTED_CONFLICT_ACTION) + else -> rem.head.err("invalid ON CONFLICT syntax", ErrorCode.PARSE_EXPECTED_CONFLICT_ACTION) } } - else -> rem.head.err("invalid ON CONFLICT syntax", PARSE_EXPECTED_WHERE_CLAUSE) + else -> rem.head.err("invalid ON CONFLICT syntax", ErrorCode.PARSE_EXPECTED_WHERE_CLAUSE) } } else null } private fun List.parseSetAssignments(type: ParseType): ParseNode = parseArgList( - aliasSupportType = NONE, - mode = SET_CLAUSE_ARG_LIST + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.SET_CLAUSE_ARG_LIST ).run { if (children.isEmpty()) { - remaining.err("Expected assignment for SET", PARSE_MISSING_SET_ASSIGNMENT) + remaining.err("Expected assignment for SET", ErrorCode.PARSE_MISSING_SET_ASSIGNMENT) } copy(type = type) } private fun List.parseDelete(name: Token): ParseNode { if (head?.keywordText != "from") { - err("Expected FROM after DELETE", PARSE_UNEXPECTED_TOKEN) + err("Expected FROM after DELETE", ErrorCode.PARSE_UNEXPECTED_TOKEN) } - return tail.parseLegacyDml { ParseNode(DELETE, name, emptyList(), this) } + return tail.parseLegacyDml { ParseNode(ParseType.DELETE, name, emptyList(), this) } } private fun List.parseUpdate(): ParseNode = parseLegacyDml { @@ -1612,7 +1656,7 @@ class SqlParser(private val ion: IonSystem) : Parser { var rem = this val returningElems = listOf(rem.parseReturningElems()) rem = returningElems.first().remaining - return ParseNode(type = RETURNING, token = null, children = returningElems, remaining = rem) + return ParseNode(type = ParseType.RETURNING, token = null, children = returningElems, remaining = rem) } private inline fun List.parseLegacyDml(parseDmlOp: List.() -> ParseNode): ParseNode { @@ -1633,7 +1677,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } } - children.add(ParseNode(FROM_CLAUSE, null, listOf(source), rem)) + children.add(ParseNode(ParseType.FROM_CLAUSE, null, listOf(source), rem)) val operation = rem.parseDmlOp().also { rem = it.remaining @@ -1650,7 +1694,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } // generate a FROM-node to normalize the parse tree - return ParseNode(FROM, null, listOf(operation) + children, rem) + return ParseNode(ParseType.FROM, null, listOf(operation) + children, rem) } private fun List.parseOptionalWhere(): ParseNode? 
{ @@ -1660,7 +1704,7 @@ class SqlParser(private val ion: IonSystem) : Parser { if (rem.head?.keywordText == "where") { val expr = rem.tail.parseExpression() rem = expr.remaining - return ParseNode(WHERE, null, listOf(expr), rem) + return ParseNode(ParseType.WHERE, null, listOf(expr), rem) } return null @@ -1681,7 +1725,7 @@ class SqlParser(private val ion: IonSystem) : Parser { var rem = this var returningMapping = rem.parseReturningMapping().also { rem = it.remaining } var column = rem.parseColumn().also { rem = it.remaining } - ParseNode(type = RETURNING_ELEM, token = null, children = listOf(returningMapping, column), remaining = rem) + ParseNode(type = ParseType.RETURNING_ELEM, token = null, children = listOf(returningMapping, column), remaining = rem) } } @@ -1689,21 +1733,24 @@ class SqlParser(private val ion: IonSystem) : Parser { var rem = this when (rem.head?.keywordText) { "modified_old", "modified_new", "all_old", "all_new" -> { - return ParseNode(type = RETURNING_MAPPING, token = rem.head, children = listOf(), remaining = rem.tail) + return ParseNode(type = ParseType.RETURNING_MAPPING, token = rem.head, children = listOf(), + remaining = rem.tail) } - else -> rem.err("Expected ( MODIFIED | ALL ) ( NEW | OLD ) in each returning element.", PARSE_EXPECTED_RETURNING_CLAUSE) + else -> rem.err("Expected ( MODIFIED | ALL ) ( NEW | OLD ) in each returning element.", + ErrorCode.PARSE_EXPECTED_RETURNING_CLAUSE) } } private fun List.parseColumn(): ParseNode { return when (this.head?.type) { - STAR -> ParseNode(RETURNING_WILDCARD, this.head, listOf(), this.tail) + TokenType.STAR -> ParseNode(ParseType.RETURNING_WILDCARD, this.head, listOf(), this.tail) else -> { var expr = parseExpression().let { when (it.type) { - PATH -> inspectColumnPathExpression(it) - ATOM -> it - else -> this.err("Unsupported syntax in RETURNING columns.", PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX) + ParseType.PATH -> inspectColumnPathExpression(it) + ParseType.ATOM -> it + else -> this.err("Unsupported syntax in RETURNING columns.", + ErrorCode.PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX) } } expr @@ -1713,7 +1760,8 @@ class SqlParser(private val ion: IonSystem) : Parser { private fun inspectColumnPathExpression(pathNode: ParseNode): ParseNode { if (pathNode.children.size > 2) { - pathNode.children[2].token?.err("More than two paths in RETURNING columns.", PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX) + pathNode.children[2].token?.err("More than two paths in RETURNING columns.", + ErrorCode.PARSE_UNSUPPORTED_RETURNING_CLAUSE_SYNTAX) } return pathNode } @@ -1724,7 +1772,8 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = value.remaining val name = rem.parseExpression() rem = name.remaining - val selectAfterProjection = parseSelectAfterProjection(PIVOT,ParseNode(MEMBER, null, listOf(name, value), rem)) + val selectAfterProjection = parseSelectAfterProjection(ParseType.PIVOT, + ParseNode(ParseType.MEMBER, null, listOf(name, value), rem)) return selectAfterProjection } @@ -1743,16 +1792,16 @@ class SqlParser(private val ion: IonSystem) : Parser { else -> false } - var type = SELECT_LIST + var type = ParseType.SELECT_LIST var projection = when { rem.head?.keywordText == "value" -> { - type = SELECT_VALUE + type = ParseType.SELECT_VALUE rem.tail.parseExpression() } else -> { val list = rem.parseSelectList() if (list.children.isEmpty()) { - rem.err("Cannot have empty SELECT list", PARSE_EMPTY_SELECT) + rem.err("Cannot have empty SELECT list", ErrorCode.PARSE_EMPTY_SELECT) } val asterisk = list.children.firstOrNull 
{ it.type == ParseType.PROJECT_ALL && it.children.isEmpty() } @@ -1767,7 +1816,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } } if (distinct) { - projection = ParseNode(DISTINCT, null, listOf(projection), projection.remaining) + projection = ParseNode(ParseType.DISTINCT, null, listOf(projection), projection.remaining) } val parseSelectAfterProjection = parseSelectAfterProjection(type, projection) @@ -1803,45 +1852,45 @@ class SqlParser(private val ion: IonSystem) : Parser { */ private fun List.parseCreateTable(): ParseNode { val identifier = when (head?.type) { - QUOTED_IDENTIFIER, IDENTIFIER -> { + TokenType.QUOTED_IDENTIFIER, TokenType.IDENTIFIER -> { atomFromHead() } else -> { err("Expected identifier!", ErrorCode.PARSE_UNEXPECTED_TOKEN) } } - return ParseNode(CREATE_TABLE, null, listOf(identifier), identifier.remaining) + return ParseNode(ParseType.CREATE_TABLE, null, listOf(identifier), identifier.remaining) } private fun List.parseDropIndex(): ParseNode { var rem = this val identifier = when (rem.head?.type) { - IDENTIFIER, QUOTED_IDENTIFIER -> { + TokenType.IDENTIFIER, TokenType.QUOTED_IDENTIFIER -> { atomFromHead() } else -> { - rem.err("Expected identifier!", PARSE_UNEXPECTED_TOKEN) + rem.err("Expected identifier!", ErrorCode.PARSE_UNEXPECTED_TOKEN) } } rem = rem.tail if (rem.head?.keywordText != "on") { - rem.err("Expected ON", PARSE_UNEXPECTED_TOKEN) + rem.err("Expected ON", ErrorCode.PARSE_UNEXPECTED_TOKEN) } rem = rem.tail val target = when (rem.head?.type) { - QUOTED_IDENTIFIER, IDENTIFIER -> { + TokenType.QUOTED_IDENTIFIER, TokenType.IDENTIFIER -> { rem.atomFromHead() } else -> { - rem.err("Table target must be an identifier", PARSE_UNEXPECTED_TOKEN) + rem.err("Table target must be an identifier", ErrorCode.PARSE_UNEXPECTED_TOKEN) } } rem = rem.tail - return ParseNode(DROP_INDEX, null, listOf(identifier, target), rem) + return ParseNode(ParseType.DROP_INDEX, null, listOf(identifier, target), rem) } /** @@ -1849,7 +1898,7 @@ class SqlParser(private val ion: IonSystem) : Parser { */ private fun List.parseDropTable(): ParseNode { val identifier = when (head?.type) { - QUOTED_IDENTIFIER, IDENTIFIER -> { + TokenType.QUOTED_IDENTIFIER, TokenType.IDENTIFIER -> { atomFromHead() } else -> { @@ -1857,7 +1906,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } } - return ParseNode(DROP_TABLE, null, listOf(identifier), identifier.remaining) + return ParseNode(ParseType.DROP_TABLE, null, listOf(identifier), identifier.remaining) } /** @@ -1875,7 +1924,7 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = rem.tail val target = when (rem.head?.type) { - QUOTED_IDENTIFIER, IDENTIFIER -> { + TokenType.QUOTED_IDENTIFIER, TokenType.IDENTIFIER -> { rem.atomFromHead() } else -> { @@ -1884,15 +1933,16 @@ class SqlParser(private val ion: IonSystem) : Parser { } rem = target.remaining - if (rem.head?.type != LEFT_PAREN) { + if (rem.head?.type != TokenType.LEFT_PAREN) { rem.err("Expected parenthesis for keys", ErrorCode.PARSE_UNEXPECTED_TOKEN) } // TODO support full expressions here... 
only simple paths for now - val keys = rem.tail.parseArgList(NONE, SIMPLE_PATH_ARG_LIST).deriveExpected(RIGHT_PAREN) + val keys = rem.tail.parseArgList(AliasSupportType.NONE, ArgListMode.SIMPLE_PATH_ARG_LIST) + .deriveExpected(TokenType.RIGHT_PAREN) rem = keys.remaining // TODO support other syntax options - return ParseNode(CREATE_INDEX, null, listOf(target, keys), rem) + return ParseNode(ParseType.CREATE_INDEX, null, listOf(target, keys), rem) } /** @@ -1936,13 +1986,13 @@ class SqlParser(private val ion: IonSystem) : Parser { val flattened = flattenParseNode(pathNode).drop(2) //Is invalid if contains PATH_WILDCARD (i.e. to `[*]`} - flattened.firstOrNull { it.type == PATH_WILDCARD } + flattened.firstOrNull { it.type == ParseType.PATH_WILDCARD } ?.token ?.err("Invalid use of * in select list", ErrorCode.PARSE_INVALID_CONTEXT_FOR_WILDCARD_IN_SELECT_LIST) //Is invalid if contains PATH_WILDCARD_UNPIVOT (i.e. * as part of a dotted expression) anywhere except at the end. //i.e. f.*.b is invalid but f.b.* is not. - flattened.dropLast(1).firstOrNull { it.type == PATH_UNPIVOT } + flattened.dropLast(1).firstOrNull { it.type == ParseType.PATH_UNPIVOT } ?.token ?.err("Invalid use of * in select list", ErrorCode.PARSE_INVALID_CONTEXT_FOR_WILDCARD_IN_SELECT_LIST) @@ -1952,14 +2002,14 @@ class SqlParser(private val ion: IonSystem) : Parser { //Is invalid if contains a square bracket anywhere and a wildcard at the end. //i.e f[1].* is invalid - flattened.firstOrNull { it.type == PATH_SQB } + flattened.firstOrNull { it.type == ParseType.PATH_SQB } ?.token ?.err("Cannot use [] and * together in SELECT list expression", ErrorCode.PARSE_CANNOT_MIX_SQB_AND_WILDCARD_IN_SELECT_LIST) val pathPart = pathNode.copy(children = pathNode.children.dropLast(1)) return ParseNode( - type = PROJECT_ALL, + type = ParseType.PROJECT_ALL, token = null, children = listOf(if (pathPart.children.size == 1) pathPart.children[0] else pathPart), remaining = pathNode.remaining) @@ -1969,13 +2019,13 @@ class SqlParser(private val ion: IonSystem) : Parser { private fun List.parseSelectList(): ParseNode { return parseCommaList { - if (this.head?.type == STAR) { - ParseNode(PROJECT_ALL, this.head, listOf(), this.tail) + if (this.head?.type == TokenType.STAR) { + ParseNode(ParseType.PROJECT_ALL, this.head, listOf(), this.tail) } else { val expr = parseExpression().let { when (it.type) { - PATH -> inspectPathExpression(it) + ParseType.PATH -> inspectPathExpression(it) else -> it } } @@ -1992,7 +2042,7 @@ class SqlParser(private val ion: IonSystem) : Parser { // TODO support SELECT with no FROM if (rem.head?.keywordText != "from") { - rem.err("Expected FROM after SELECT list", PARSE_SELECT_MISSING_FROM) + rem.err("Expected FROM after SELECT list", ErrorCode.PARSE_SELECT_MISSING_FROM) } val fromList = rem.tail.parseFromSourceList(OperatorPrecedenceGroups.SELECT.precedence) @@ -2014,16 +2064,16 @@ class SqlParser(private val ion: IonSystem) : Parser { children.add(letParseNode) } - parseOptionalSingleExpressionClause(WHERE) + parseOptionalSingleExpressionClause(ParseType.WHERE) if (rem.head?.keywordText == "order") { - rem = rem.tail.tailExpectedToken(BY) + rem = rem.tail.tailExpectedToken(TokenType.BY) val orderByChildren = listOf(rem.parseOrderByArgList()) rem = orderByChildren.first().remaining children.add( - ParseNode(type = ORDER_BY, token = null, children = orderByChildren, remaining = rem) + ParseNode(type = ParseType.ORDER_BY, token = null, children = orderByChildren, remaining = rem) ) } @@ -2032,24 +2082,25 @@ class 
SqlParser(private val ion: IonSystem) : Parser { val type = when (rem.head?.keywordText) { "partial" -> { rem = rem.tail - GROUP_PARTIAL + ParseType.GROUP_PARTIAL } - else -> GROUP + else -> ParseType.GROUP } val groupChildren = ArrayList() - rem = rem.tailExpectedToken(BY) + rem = rem.tailExpectedToken(TokenType.BY) val groupKey = rem.parseArgList( - aliasSupportType = AS_ONLY, - mode = NORMAL_ARG_LIST, + aliasSupportType = AliasSupportType.AS_ONLY, + mode = ArgListMode.NORMAL_ARG_LIST, precedence = OperatorPrecedenceGroups.SELECT.precedence ) groupKey.children.forEach { // TODO support ordinal case - if (it.token?.type == LITERAL) { - it.token.err("Literals (including ordinals) not supported in GROUP BY", PARSE_UNSUPPORTED_LITERALS_GROUPBY) + if (it.token?.type == TokenType.LITERAL) { + it.token.err("Literals (including ordinals) not supported in GROUP BY", + ErrorCode.PARSE_UNSUPPORTED_LITERALS_GROUPBY) } } groupChildren.add(groupKey) @@ -2059,7 +2110,8 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = rem.tail.tailExpectedKeyword("as") if (rem.head?.type?.isIdentifier() != true) { - rem.err("Expected identifier for GROUP name", PARSE_EXPECTED_IDENT_FOR_GROUP_NAME) + rem.err("Expected identifier for GROUP name", + ErrorCode.PARSE_EXPECTED_IDENT_FOR_GROUP_NAME) } groupChildren.add(rem.atomFromHead()) rem = rem.tail @@ -2074,23 +2126,23 @@ class SqlParser(private val ion: IonSystem) : Parser { ) } - parseOptionalSingleExpressionClause(HAVING) + parseOptionalSingleExpressionClause(ParseType.HAVING) - parseOptionalSingleExpressionClause(LIMIT) + parseOptionalSingleExpressionClause(ParseType.LIMIT) - parseOptionalSingleExpressionClause(OFFSET) + parseOptionalSingleExpressionClause(ParseType.OFFSET) return ParseNode(selectType, null, children, rem) } private fun List.parseFunctionCall(name: Token): ParseNode { fun parseCallArguments(callName: String, args: List, callType: ParseType): ParseNode = when(args.head?.type) { - STAR -> err("$callName(*) is not allowed", PARSE_UNSUPPORTED_CALL_WITH_STAR) - RIGHT_PAREN -> ParseNode(callType, name, emptyList(), tail) + TokenType.STAR -> err("$callName(*) is not allowed", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR) + TokenType.RIGHT_PAREN -> ParseNode(callType, name, emptyList(), tail) else -> { - args.parseArgList(aliasSupportType = NONE, mode = NORMAL_ARG_LIST) + args.parseArgList(aliasSupportType = AliasSupportType.NONE, mode = ArgListMode.NORMAL_ARG_LIST) .copy(type = callType, token = name) - .deriveExpected(RIGHT_PAREN) + .deriveExpected(TokenType.RIGHT_PAREN) } } @@ -2101,84 +2153,85 @@ class SqlParser(private val ion: IonSystem) : Parser { return when (callName) { "count" -> { when { - head?.type == RIGHT_PAREN -> { - err("Aggregate functions are always unary", PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL) + head?.type == TokenType.RIGHT_PAREN -> { + err("Aggregate functions are always unary", ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL) } // COUNT(*) - head?.type == STAR -> { - ParseNode(CALL_AGG_WILDCARD, name, emptyList(), tail).deriveExpected(RIGHT_PAREN) + head?.type == TokenType.STAR -> { + ParseNode(ParseType.CALL_AGG_WILDCARD, name, emptyList(), tail).deriveExpected(TokenType.RIGHT_PAREN) } - head?.type == KEYWORD && keywordText == "distinct" -> { + head?.type == TokenType.KEYWORD && keywordText == "distinct" -> { when(memoizedTail.head?.type) { // COUNT(DISTINCT *) - STAR -> { - err("COUNT(DISTINCT *) is not supported", PARSE_UNSUPPORTED_CALL_WITH_STAR) + TokenType.STAR -> { + err("COUNT(DISTINCT *) is not supported", 
ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR) } // COUNT(DISTINCT expression) else -> { - memoizedTail.parseArgList(aliasSupportType = NONE, mode = NORMAL_ARG_LIST) - .copy(type = CALL_DISTINCT_AGG, token = name) - .deriveExpected(RIGHT_PAREN) + memoizedTail.parseArgList(aliasSupportType = AliasSupportType.NONE, mode = ArgListMode.NORMAL_ARG_LIST) + .copy(type = ParseType.CALL_DISTINCT_AGG, token = name) + .deriveExpected(TokenType.RIGHT_PAREN) } } } - head?.type == KEYWORD && keywordText == "all" -> { + head?.type == TokenType.KEYWORD && keywordText == "all" -> { when(memoizedTail.head?.type) { - STAR -> err("COUNT(ALL *) is not supported", PARSE_UNSUPPORTED_CALL_WITH_STAR) + TokenType.STAR -> err("COUNT(ALL *) is not supported", ErrorCode.PARSE_UNSUPPORTED_CALL_WITH_STAR) // COUNT(ALL expression) else -> { - memoizedTail.parseArgList(aliasSupportType = NONE, mode = NORMAL_ARG_LIST) - .copy(type = CALL_AGG, token = name) - .deriveExpected(RIGHT_PAREN) + memoizedTail.parseArgList(aliasSupportType = AliasSupportType.NONE, mode = ArgListMode.NORMAL_ARG_LIST) + .copy(type = ParseType.CALL_AGG, token = name) + .deriveExpected(TokenType.RIGHT_PAREN) } } } - else -> parseArgList(aliasSupportType = NONE, mode = NORMAL_ARG_LIST) - .copy(type = CALL_AGG, token = name) - .deriveExpected(RIGHT_PAREN) + else -> parseArgList(aliasSupportType = AliasSupportType.NONE, mode = ArgListMode.NORMAL_ARG_LIST) + .copy(type = ParseType.CALL_AGG, token = name) + .deriveExpected(TokenType.RIGHT_PAREN) } } in STANDARD_AGGREGATE_FUNCTIONS -> { val call = when { - head?.type == KEYWORD && head?.keywordText == "distinct" -> { - parseCallArguments(callName, tail, CALL_DISTINCT_AGG) + head?.type == TokenType.KEYWORD && head?.keywordText == "distinct" -> { + parseCallArguments(callName, tail, ParseType.CALL_DISTINCT_AGG) } - head?.type == KEYWORD && head?.keywordText == "all" -> { - parseCallArguments(callName, tail, CALL_AGG) + head?.type == TokenType.KEYWORD && head?.keywordText == "all" -> { + parseCallArguments(callName, tail, ParseType.CALL_AGG) } else -> { - parseCallArguments(callName, this, CALL_AGG) + parseCallArguments(callName, this, ParseType.CALL_AGG) } } if (call.children.size != 1) { - err("Aggregate functions are always unary", PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL) + err("Aggregate functions are always unary", + ErrorCode.PARSE_NON_UNARY_AGREGATE_FUNCTION_CALL) } call } // normal function - else -> parseCallArguments(callName, this, CALL) + else -> parseCallArguments(callName, this, ParseType.CALL) } } private fun List.parseExec(): ParseNode { var rem = this - if (rem.head?.type == EOF) { - rem.err("No stored procedure provided", PARSE_NO_STORED_PROCEDURE_PROVIDED) + if (rem.head?.type == TokenType.EOF) { + rem.err("No stored procedure provided", ErrorCode.PARSE_NO_STORED_PROCEDURE_PROVIDED) } rem.forEach { if (it.keywordText?.toLowerCase() == "exec") { - it.err("EXEC call found at unexpected location", PARSE_UNEXPECTED_TERM) + it.err("EXEC call found at unexpected location", ErrorCode.PARSE_UNEXPECTED_TERM) } } @@ -2186,16 +2239,16 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = rem.tail // Stored procedure call has no args - if (rem.head?.type == EOF) { - return ParseNode(EXEC, procedureName, emptyList(), rem) + if (rem.head?.type == TokenType.EOF) { + return ParseNode(ParseType.EXEC, procedureName, emptyList(), rem) } - else if (rem.head?.type == LEFT_PAREN) { - rem.err("Unexpected $LEFT_PAREN found following stored procedure call", PARSE_UNEXPECTED_TOKEN) + else if (rem.head?.type == 
TokenType.LEFT_PAREN) { + rem.err("Unexpected ${TokenType.LEFT_PAREN} found following stored procedure call", ErrorCode.PARSE_UNEXPECTED_TOKEN) } - return rem.parseArgList(aliasSupportType = NONE, mode = NORMAL_ARG_LIST) - .copy(type = EXEC, token = procedureName) + return rem.parseArgList(aliasSupportType = AliasSupportType.NONE, mode = ArgListMode.NORMAL_ARG_LIST) + .copy(type = ParseType.EXEC, token = procedureName) } /** @@ -2207,10 +2260,11 @@ class SqlParser(private val ion: IonSystem) : Parser { private fun List.parseSubstring(name: Token): ParseNode { var rem = this - if (rem.head?.type != LEFT_PAREN) { + if (rem.head?.type != TokenType.LEFT_PAREN) { val pvmap = PropertyValueMap() - pvmap[EXPECTED_TOKEN_TYPE] = LEFT_PAREN - rem.err("Expected $LEFT_PAREN", PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL, pvmap) + pvmap[Property.EXPECTED_TOKEN_TYPE] = TokenType.LEFT_PAREN + rem.err("Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL, pvmap) } var stringExpr = tail.parseExpression() @@ -2222,14 +2276,15 @@ class SqlParser(private val ion: IonSystem) : Parser { parseSql92Syntax = true stringExpr.deriveExpectedKeyword("from") } - rem.head!!.type == COMMA -> stringExpr.deriveExpected(COMMA) - else -> rem.err("Expected $KEYWORD 'from' OR $COMMA", PARSE_EXPECTED_ARGUMENT_DELIMITER) + rem.head!!.type == TokenType.COMMA -> stringExpr.deriveExpected(TokenType.COMMA) + else -> rem.err("Expected ${TokenType.KEYWORD} 'from' OR ${TokenType.COMMA}", + ErrorCode.PARSE_EXPECTED_ARGUMENT_DELIMITER) } val (positionExpr: ParseNode, expectedToken: Token) = stringExpr.remaining.parseExpression() - .deriveExpected(if(parseSql92Syntax) FOR else COMMA, RIGHT_PAREN) + .deriveExpected(if(parseSql92Syntax) TokenType.FOR else TokenType.COMMA, TokenType.RIGHT_PAREN) - if (expectedToken.type == RIGHT_PAREN) { + if (expectedToken.type == TokenType.RIGHT_PAREN) { return ParseNode( ParseType.CALL, name, @@ -2239,7 +2294,7 @@ class SqlParser(private val ion: IonSystem) : Parser { } rem = positionExpr.remaining - val lengthExpr = rem.parseExpression().deriveExpected(RIGHT_PAREN) + val lengthExpr = rem.parseExpression().deriveExpected(TokenType.RIGHT_PAREN) return ParseNode(ParseType.CALL, name, listOf(stringExpr, positionExpr, lengthExpr), @@ -2253,7 +2308,8 @@ class SqlParser(private val ion: IonSystem) : Parser { * Syntax is TRIM([[ specification ] [to trim characters] FROM] ). 
*/ private fun List.parseTrim(name: Token): ParseNode { - if (head?.type != LEFT_PAREN) err("Expected $LEFT_PAREN", PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) + if (head?.type != TokenType.LEFT_PAREN) err("Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) var rem = tail val arguments = mutableListOf() @@ -2267,8 +2323,10 @@ class SqlParser(private val ion: IonSystem) : Parser { val maybeTrimSpec = rem.head val hasSpecification = when { - maybeTrimSpec?.type == IDENTIFIER && TRIM_SPECIFICATION_KEYWORDS.contains(maybeTrimSpec.text?.toLowerCase()) -> { - arguments.add(ParseNode(ATOM, maybeTrimSpec.copy(type = TRIM_SPECIFICATION), listOf(), rem.tail)) + maybeTrimSpec?.type == TokenType.IDENTIFIER && + TRIM_SPECIFICATION_KEYWORDS.contains(maybeTrimSpec.text?.toLowerCase()) -> { + arguments.add(ParseNode(ParseType.ATOM, maybeTrimSpec.copy(type = TokenType.TRIM_SPECIFICATION), + listOf(), rem.tail)) rem = rem.tail true @@ -2301,8 +2359,8 @@ class SqlParser(private val ion: IonSystem) : Parser { } } - if(rem.head?.type != RIGHT_PAREN) { - rem.err("Expected $RIGHT_PAREN", PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL) + if(rem.head?.type != TokenType.RIGHT_PAREN) { + rem.err("Expected ${TokenType.RIGHT_PAREN}", ErrorCode.PARSE_EXPECTED_RIGHT_PAREN_BUILTIN_FUNCTION_CALL) } return ParseNode(ParseType.CALL, name, arguments, rem.tail) @@ -2311,10 +2369,11 @@ class SqlParser(private val ion: IonSystem) : Parser { private fun List.parseDatePart(): ParseNode { val maybeDatePart = this.head return when { - maybeDatePart?.type == IDENTIFIER && DATE_PART_KEYWORDS.contains(maybeDatePart.text?.toLowerCase()) -> { - ParseNode(ATOM, maybeDatePart.copy(type = DATE_PART), listOf(), this.tail) + maybeDatePart?.type == TokenType.IDENTIFIER && DATE_PART_KEYWORDS.contains(maybeDatePart.text?.toLowerCase()) -> { + ParseNode(ParseType.ATOM, maybeDatePart.copy(type = TokenType.DATE_PART), listOf(), this.tail) } - else -> maybeDatePart.err("Expected one of: $DATE_PART_KEYWORDS", PARSE_EXPECTED_DATE_PART) + else -> maybeDatePart.err("Expected one of: $DATE_PART_KEYWORDS", + ErrorCode.PARSE_EXPECTED_DATE_PART) } } @@ -2325,14 +2384,14 @@ class SqlParser(private val ion: IonSystem) : Parser { * Syntax is EXTRACT( FROM ). 
*/ private fun List.parseExtract(name: Token): ParseNode { - if (head?.type != LEFT_PAREN) err("Expected $LEFT_PAREN", - PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) + if (head?.type != TokenType.LEFT_PAREN) err("Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) val datePart = this.tail.parseDatePart().deriveExpectedKeyword("from") val rem = datePart.remaining - val dateTimeType = rem.parseExpression().deriveExpected(RIGHT_PAREN) + val dateTimeType = rem.parseExpression().deriveExpected(TokenType.RIGHT_PAREN) - return ParseNode(CALL, name, listOf(datePart, dateTimeType), dateTimeType.remaining) + return ParseNode(ParseType.CALL, name, listOf(datePart, dateTimeType), dateTimeType.remaining) } /** @@ -2340,8 +2399,9 @@ class SqlParser(private val ion: IonSystem) : Parser { */ private fun List.parseDate(): ParseNode { val dateStringToken = head - if (dateStringToken?.value == null || dateStringToken.type != LITERAL || !dateStringToken.value.isText) { - err("Expected date string followed by the keyword DATE, found ${head?.value?.type}", PARSE_UNEXPECTED_TOKEN) + if (dateStringToken?.value == null || dateStringToken.type != TokenType.LITERAL || !dateStringToken.value.isText) { + err("Expected date string followed by the keyword DATE, found ${head?.value?.type}", + ErrorCode.PARSE_UNEXPECTED_TOKEN) } val dateString = dateStringToken.value.stringValue() @@ -2350,15 +2410,15 @@ class SqlParser(private val ion: IonSystem) : Parser { // Filter out the extended dates which can be specified with the '+' or '-' symbol. // '+99999-03-10' for example is allowed by LocalDate.parse and should be filtered out. if (!DATE_PATTERN_REGEX.matches(dateString!!)) { - err("Expected DATE string to be of the format yyyy-MM-dd", PARSE_INVALID_DATE_STRING) + err("Expected DATE string to be of the format yyyy-MM-dd", ErrorCode.PARSE_INVALID_DATE_STRING) } try { LocalDate.parse(dateString, ISO_LOCAL_DATE) } catch (e: DateTimeParseException) { - err(e.localizedMessage, PARSE_INVALID_DATE_STRING) + err(e.localizedMessage, ErrorCode.PARSE_INVALID_DATE_STRING) } - return ParseNode(DATE, head, listOf(), tail) + return ParseNode(ParseType.DATE, head, listOf(), tail) } /** @@ -2370,22 +2430,22 @@ class SqlParser(private val ion: IonSystem) : Parser { */ private fun List.parseOptionalPrecision(): ParseNode = // If the optional precision is present - if (head?.type == LEFT_PAREN) { + if (head?.type == TokenType.LEFT_PAREN) { var rem = tail // Expected precision token to be unsigned integer between 0 and 9 inclusive - if (rem.head == null || rem.head!!.type != LITERAL || !rem.head!!.value!!.isUnsignedInteger || + if (rem.head == null || rem.head!!.type != TokenType.LITERAL || !rem.head!!.value!!.isUnsignedInteger || rem.head!!.value!!.longValue() < 0 || rem.head!!.value!!.longValue() > MAX_PRECISION_FOR_TIME) { - rem.head.err("Expected integer value between 0 and 9 for precision", PARSE_INVALID_PRECISION_FOR_TIME) + rem.head.err("Expected integer value between 0 and 9 for precision", ErrorCode.PARSE_INVALID_PRECISION_FOR_TIME) } val precision = rem.head rem = rem.tail - if (rem.head?.type != RIGHT_PAREN) { - rem.head.errExpectedTokenType(RIGHT_PAREN) + if (rem.head?.type != TokenType.RIGHT_PAREN) { + rem.head.errExpectedTokenType(TokenType.RIGHT_PAREN) } - ParseNode(PRECISION, precision, listOf(), rem.tail) + ParseNode(ParseType.PRECISION, precision, listOf(), rem.tail) } else { - ParseNode(PRECISION, null, listOf(), this) + ParseNode(ParseType.PRECISION, null, listOf(), this) } 
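The parseDate hunk above notes that extended years allowed by java.time must be filtered out before LocalDate.parse is attempted; a standalone sketch (not part of the patch, with the regex only an assumed equivalent of DATE_PATTERN_REGEX) shows why the regex guard comes first:

    import java.time.LocalDate
    import java.time.format.DateTimeFormatter.ISO_LOCAL_DATE

    fun main() {
        val datePattern = Regex("""\d{4}-\d{2}-\d{2}""")    // assumed stand-in for DATE_PATTERN_REGEX
        val extended = "+99999-03-10"
        println(datePattern.matches(extended))              // false -> parser raises ErrorCode.PARSE_INVALID_DATE_STRING
        println(LocalDate.parse(extended, ISO_LOCAL_DATE))  // parses successfully, so the regex check must run first
    }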
/** @@ -2393,7 +2453,7 @@ class SqlParser(private val ion: IonSystem) : Parser { */ private fun List.checkForOptionalTimeZone(): Pair, Boolean> { // If the keyword is specified for time zone, it must be a series of keywords - "with time zone" - if (head?.type == KEYWORD) { + if (head?.type == TokenType.KEYWORD) { val rem = tailExpectedKeyword("with"). tailExpectedKeyword("time"). @@ -2434,7 +2494,7 @@ class SqlParser(private val ion: IonSystem) : Parser { parse(time, formatter) } catch (e: DateTimeParseException) { - rem.head.err(e.localizedMessage, PARSE_INVALID_TIME_STRING) + rem.head.err(e.localizedMessage, ErrorCode.PARSE_INVALID_TIME_STRING) } } @@ -2447,10 +2507,10 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = remainingAfterOptionalTimeZone val timeStringToken = rem.head - if (timeStringToken?.value == null || timeStringToken.type != LITERAL || !timeStringToken.value.isText) { + if (timeStringToken?.value == null || timeStringToken.type != TokenType.LITERAL || !timeStringToken.value.isText) { rem.head.err( "Expected time string followed by the keyword TIME OR TIME WITH TIME ZONE, found ${rem.head?.value?.type}", - PARSE_UNEXPECTED_TOKEN + ErrorCode.PARSE_UNEXPECTED_TOKEN ) } @@ -2459,7 +2519,7 @@ class SqlParser(private val ion: IonSystem) : Parser { val timeString = timeStringToken.value.stringValue()?.replace(" ", "") if (!genericTimeRegex.matches(timeString!!)) { rem.head.err("Invalid format for time string. Expected format is \"TIME [(p)] [WITH TIME ZONE] HH:MM:SS[.ddddd...][+|-HH:MM]\"", - PARSE_INVALID_TIME_STRING) + ErrorCode. PARSE_INVALID_TIME_STRING) } // For "TIME WITH TIME ZONE", if the time zone is not explicitly specified, we still consider it as valid. // We will add the default time zone to it later in the evaluation phase. @@ -2475,10 +2535,10 @@ class SqlParser(private val ion: IonSystem) : Parser { // The source span here is just the filler value and does not reflect the actual source location of the precision // as it does not exists in case the precision is unspecified. val precisionOfValue = precision.token ?: - Token(LITERAL, ion.newInt(getPrecisionFromTimeString(timeString)), timeStringToken.span) + Token(TokenType.LITERAL, ion.newInt(getPrecisionFromTimeString(timeString)), timeStringToken.span) return ParseNode( - if (withTimeZone) TIME_WITH_TIME_ZONE else TIME, + if (withTimeZone) ParseType.TIME_WITH_TIME_ZONE else ParseType.TIME, rem.head!!.copy(value = ion.newString(timeString)), listOf(precision.copy(token = precisionOfValue)), rem.tail) @@ -2491,15 +2551,15 @@ class SqlParser(private val ion: IonSystem) : Parser { * is the value of [name]. 
*/ private fun List.parseDateAddOrDateDiff(name: Token): ParseNode { - if (head?.type != LEFT_PAREN) err("Expected $LEFT_PAREN", - PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) + if (head?.type != TokenType.LEFT_PAREN) err("Expected ${TokenType.LEFT_PAREN}", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_BUILTIN_FUNCTION_CALL) - val datePart = this.tail.parseDatePart().deriveExpected(COMMA) + val datePart = this.tail.parseDatePart().deriveExpected(TokenType.COMMA) - val timestamp1 = datePart.remaining.parseExpression().deriveExpected(COMMA) - val timestamp2 = timestamp1.remaining.parseExpression().deriveExpected(RIGHT_PAREN) + val timestamp1 = datePart.remaining.parseExpression().deriveExpected(TokenType.COMMA) + val timestamp2 = timestamp1.remaining.parseExpression().deriveExpected(TokenType.RIGHT_PAREN) - return ParseNode(CALL, name, listOf(datePart, timestamp1, timestamp2), timestamp2.remaining) + return ParseNode(ParseType.CALL, name, listOf(datePart, timestamp1, timestamp2), timestamp2.remaining) } private fun List.parseLet(): ParseNode { @@ -2508,96 +2568,99 @@ class SqlParser(private val ion: IonSystem) : Parser { var child = rem.parseExpression() rem = child.remaining - if (rem.head?.type != AS) { - rem.head.err("Expected $AS following $LET expr", PARSE_EXPECTED_AS_FOR_LET) + if (rem.head?.type != TokenType.AS) { + rem.head.err("Expected ${TokenType.AS} following ${ParseType.LET} expr", + ErrorCode.PARSE_EXPECTED_AS_FOR_LET) } rem = rem.tail if (rem.head?.type?.isIdentifier() != true) { - rem.head.err("Expected identifier for $AS-alias", PARSE_EXPECTED_IDENT_FOR_ALIAS) + rem.head.err("Expected identifier for ${TokenType.AS}-alias", + ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS) } var name = rem.head rem = rem.tail - letClauses.add(ParseNode(AS_ALIAS, name, listOf(child), rem)) + letClauses.add(ParseNode(ParseType.AS_ALIAS, name, listOf(child), rem)) - while (rem.head?.type == COMMA) { + while (rem.head?.type == TokenType.COMMA) { rem = rem.tail child = rem.parseExpression() rem = child.remaining - if (rem.head?.type != AS) { - rem.head.err("Expected $AS following $LET expr", PARSE_EXPECTED_AS_FOR_LET) + if (rem.head?.type != TokenType.AS) { + rem.head.err("Expected ${TokenType.AS} following ${ParseType.LET} expr", ErrorCode.PARSE_EXPECTED_AS_FOR_LET) } rem = rem.tail if (rem.head?.type?.isIdentifier() != true) { - rem.head.err("Expected identifier for $AS-alias", PARSE_EXPECTED_IDENT_FOR_ALIAS) + rem.head.err("Expected identifier for ${TokenType.AS}-alias", ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS) } name = rem.head rem = rem.tail - letClauses.add(ParseNode(AS_ALIAS, name, listOf(child), rem)) + letClauses.add(ParseNode(ParseType.AS_ALIAS, name, listOf(child), rem)) } - return ParseNode(LET, null, letClauses, rem) + return ParseNode(ParseType.LET, null, letClauses, rem) } private fun List.parseListLiteral(): ParseNode = parseArgList( - aliasSupportType = NONE, - mode = NORMAL_ARG_LIST + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.NORMAL_ARG_LIST ).copy( - type = LIST - ).deriveExpected(RIGHT_BRACKET) + type = ParseType.LIST + ).deriveExpected(TokenType.RIGHT_BRACKET) private fun List.parseBagLiteral(): ParseNode = parseArgList( - aliasSupportType = NONE, - mode = NORMAL_ARG_LIST + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.NORMAL_ARG_LIST ).copy( - type = BAG - ).deriveExpected(RIGHT_DOUBLE_ANGLE_BRACKET) + type = ParseType.BAG + ).deriveExpected(TokenType.RIGHT_DOUBLE_ANGLE_BRACKET) private fun List.parseStructLiteral(): ParseNode = parseArgList( - 
aliasSupportType = NONE, - mode = STRUCT_LITERAL_ARG_LIST + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.STRUCT_LITERAL_ARG_LIST ).copy( - type = STRUCT - ).deriveExpected(RIGHT_CURLY) + type = ParseType.STRUCT + ).deriveExpected(TokenType.RIGHT_CURLY) private fun List.parseTableValues(): ParseNode = parseCommaList { var rem = this - if (rem.head?.type != LEFT_PAREN) { - err("Expected $LEFT_PAREN for row value constructor", PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR) + if (rem.head?.type != TokenType.LEFT_PAREN) { + err("Expected ${TokenType.LEFT_PAREN} for row value constructor", + ErrorCode.PARSE_EXPECTED_LEFT_PAREN_VALUE_CONSTRUCTOR) } rem = rem.tail rem.parseArgList( - aliasSupportType = NONE, - mode = NORMAL_ARG_LIST + aliasSupportType = AliasSupportType.NONE, + mode = ArgListMode.NORMAL_ARG_LIST ).copy( - type = LIST - ).deriveExpected(RIGHT_PAREN) + type = ParseType.LIST + ).deriveExpected(TokenType.RIGHT_PAREN) } private val parseCommaDelim: List.() -> ParseNode? = { when (head?.type) { - COMMA -> atomFromHead() + TokenType.COMMA -> atomFromHead() else -> null } } private val parseJoinDelim: List.() -> ParseNode? = { when (head?.type) { - COMMA -> atomFromHead(INNER_JOIN) - KEYWORD -> when (head?.keywordText) { - "join", "cross_join", "inner_join" -> atomFromHead(INNER_JOIN) - "left_join", "left_cross_join" -> atomFromHead(LEFT_JOIN) - "right_join", "right_cross_join" -> atomFromHead(RIGHT_JOIN) - "outer_join", "outer_cross_join" -> atomFromHead(OUTER_JOIN) + TokenType.COMMA -> atomFromHead(ParseType.INNER_JOIN) + TokenType.KEYWORD -> when (head?.keywordText) { + "join", "cross_join", "inner_join" -> atomFromHead(ParseType.INNER_JOIN) + "left_join", "left_cross_join" -> atomFromHead(ParseType.LEFT_JOIN) + "right_join", "right_cross_join" -> atomFromHead(ParseType.RIGHT_JOIN) + "outer_join", "outer_cross_join" -> atomFromHead(ParseType.OUTER_JOIN) else -> null } else -> null @@ -2613,16 +2676,16 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = child.remaining when (rem.head?.type) { - ASC, DESC -> { + TokenType.ASC, TokenType.DESC -> { sortSpecKey = listOf(child, ParseNode( - type = ORDERING_SPEC, + type = ParseType.ORDERING_SPEC, token = rem.head, children = listOf(), remaining = rem.tail)) rem = rem.tail } } - ParseNode(type = SORT_SPEC, token = null, children = sortSpecKey, remaining = rem) + ParseNode(type = ParseType.SORT_SPEC, token = null, children = sortSpecKey, remaining = rem) } } @@ -2632,18 +2695,18 @@ class SqlParser(private val ion: IonSystem) : Parser { "unpivot" -> { val actualChild = rem.tail.parseExpression(precedence) ParseNode( - UNPIVOT, - rem.head, - listOf(actualChild), - actualChild.remaining + ParseType.UNPIVOT, + rem.head, + listOf(actualChild), + actualChild.remaining ) } else -> { - val isSubqueryOrLiteral = rem.tail.head?.type == LITERAL || rem.tail.head?.keywordText == "select" - if (rem.head?.type == LEFT_PAREN && !isSubqueryOrLiteral) { + val isSubqueryOrLiteral = rem.tail.head?.type == TokenType.LITERAL || rem.tail.head?.keywordText == "select" + if (rem.head?.type == TokenType.LEFT_PAREN && !isSubqueryOrLiteral) { // Starts with a left paren and is not a subquery or literal, so parse as a from source rem = rem.tail - rem.parseFromSource(precedence).deriveExpected(RIGHT_PAREN) + rem.parseFromSource(precedence).deriveExpected(TokenType.RIGHT_PAREN) } else { rem.parseExpression(precedence) @@ -2670,7 +2733,7 @@ class SqlParser(private val ion: IonSystem) : Parser { if (parseRemaining) { while (delim?.type?.isJoin == 
true) { val isCrossJoin = delim.token?.keywordText?.contains("cross") ?: false - val hasOnClause = delim.token?.type == KEYWORD && !isCrossJoin + val hasOnClause = delim.token?.type == TokenType.KEYWORD && !isCrossJoin var children : List var joinToken : Token? = delim.token @@ -2678,15 +2741,15 @@ class SqlParser(private val ion: IonSystem) : Parser { if (hasOnClause) { // Explicit join - if (rem.head?.type == LEFT_PAREN) { + if (rem.head?.type == TokenType.LEFT_PAREN) { // Starts with a left paren. Could indicate subquery/literal or indicate higher precedence - val isSubqueryOrLiteral = rem.tail.head?.type == LITERAL || rem.tail.head?.keywordText == "select" + val isSubqueryOrLiteral = rem.tail.head?.type == TokenType.LITERAL || rem.tail.head?.keywordText == "select" val parenClause = rem.parseFromSource(precedence, parseRemaining = true) rem = parenClause.remaining // check for an ON-clause if (rem.head?.keywordText != "on") { - rem.err("Expected 'ON'", PARSE_MALFORMED_JOIN) + rem.err("Expected 'ON'", ErrorCode.PARSE_MALFORMED_JOIN) } val onClause = rem.tail.parseExpression(precedence) @@ -2708,7 +2771,7 @@ class SqlParser(private val ion: IonSystem) : Parser { // check for an ON-clause if (rem.head?.keywordText != "on") { - rem.err("Expected 'ON'", PARSE_MALFORMED_JOIN) + rem.err("Expected 'ON'", ErrorCode.PARSE_MALFORMED_JOIN) } val onClause = rem.tail.parseExpression(precedence) @@ -2724,14 +2787,14 @@ class SqlParser(private val ion: IonSystem) : Parser { val rightRef = rem.parseFromSource(precedence, parseRemaining = false) rem = rightRef.remaining children = listOf(left, rightRef) - if (delim.token?.type == COMMA) { + if (delim.token?.type == TokenType.COMMA) { joinToken = delim.token?.copy( - type = KEYWORD, + type = TokenType.KEYWORD, value = ion.newSymbol("cross_join") ) } } - left = ParseNode(FROM_SOURCE_JOIN, joinToken, children, rem) + left = ParseNode(ParseType.FROM_SOURCE_JOIN, joinToken, children, rem) delim = rem.parseJoinDelim() } return left @@ -2742,7 +2805,7 @@ class SqlParser(private val ion: IonSystem) : Parser { private fun List.parseFromSourceList(precedence: Int = -1): ParseNode { val child = this.parseFromSource(precedence) - return ParseNode(FROM_CLAUSE, null, listOf(child), child.remaining) + return ParseNode(ParseType.FROM_CLAUSE, null, listOf(child), child.remaining) } private fun List.parseArgList( @@ -2755,24 +2818,24 @@ class SqlParser(private val ion: IonSystem) : Parser { return parseDelimitedList(parseDelim) { delim -> var rem = this var child = when (mode) { - STRUCT_LITERAL_ARG_LIST -> { - val field = rem.parseExpression(precedence).deriveExpected(COLON) + ArgListMode.STRUCT_LITERAL_ARG_LIST -> { + val field = rem.parseExpression(precedence).deriveExpected(TokenType.COLON) rem = field.remaining val value = rem.parseExpression(precedence) - ParseNode(MEMBER, null, listOf(field, value), value.remaining) + ParseNode(ParseType.MEMBER, null, listOf(field, value), value.remaining) } - SIMPLE_PATH_ARG_LIST -> rem.parsePathTerm(PathMode.SIMPLE_PATH) - SET_CLAUSE_ARG_LIST -> { + ArgListMode.SIMPLE_PATH_ARG_LIST -> rem.parsePathTerm(PathMode.SIMPLE_PATH) + ArgListMode.SET_CLAUSE_ARG_LIST -> { val lvalue = rem.parsePathTerm(PathMode.SIMPLE_PATH) rem = lvalue.remaining if (rem.head?.keywordText != "=") { - rem.err("Expected '='", PARSE_MISSING_SET_ASSIGNMENT) + rem.err("Expected '='", ErrorCode.PARSE_MISSING_SET_ASSIGNMENT) } rem = rem.tail val rvalue = rem.parseExpression(precedence) - ParseNode(ASSIGNMENT, null, listOf(lvalue, rvalue), rvalue.remaining) + 
ParseNode(ParseType.ASSIGNMENT, null, listOf(lvalue, rvalue), rvalue.remaining) } - NORMAL_ARG_LIST -> rem.parseExpression(precedence) + ArgListMode.NORMAL_ARG_LIST -> rem.parseExpression(precedence) } rem = child.remaining @@ -2816,7 +2879,8 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = rem.tail val name = rem.head if (rem.head?.type?.isIdentifier() != true) { - rem.head.err("Expected identifier for $keywordTokenType-alias", PARSE_EXPECTED_IDENT_FOR_ALIAS) + rem.head.err("Expected identifier for $keywordTokenType-alias", + ErrorCode.PARSE_EXPECTED_IDENT_FOR_ALIAS) } rem = rem.tail ParseNode(parseNodeType, name, listOf(child), rem) @@ -2830,13 +2894,13 @@ class SqlParser(private val ion: IonSystem) : Parser { } private fun List.parseOptionalAsAlias(child: ParseNode) = - parseOptionalAlias(child = child, keywordTokenType = AS, keywordIsOptional = true, parseNodeType = AS_ALIAS) + parseOptionalAlias(child = child, keywordTokenType = TokenType.AS, keywordIsOptional = true, parseNodeType = ParseType.AS_ALIAS) private fun List.parseOptionalAtAlias(child: ParseNode) = - parseOptionalAlias(child = child, keywordTokenType = AT, keywordIsOptional = false, parseNodeType = AT_ALIAS) + parseOptionalAlias(child = child, keywordTokenType = TokenType.AT, keywordIsOptional = false, parseNodeType = ParseType.AT_ALIAS) private fun List.parseOptionalByAlias(child: ParseNode) = - parseOptionalAlias(child = child, keywordTokenType = BY, keywordIsOptional = false, parseNodeType = BY_ALIAS) + parseOptionalAlias(child = child, keywordTokenType = TokenType.BY, keywordIsOptional = false, parseNodeType = ParseType.BY_ALIAS) private inline fun List.parseCommaList(parseItem: List.() -> ParseNode) = parseDelimitedList(parseCommaDelim) { parseItem() } @@ -2870,12 +2934,13 @@ class SqlParser(private val ion: IonSystem) : Parser { rem = delim.remaining } - return ParseNode(ARG_LIST, null, items, rem) + return ParseNode(ParseType.ARG_LIST, null, items, rem) } private fun ParseNode.throwTopLevelParserError(): Nothing = - token?.err("Keyword ${token.text} only expected at the top level in the query", PARSE_UNEXPECTED_TERM) - ?: throw ParserException("Keyword ${token?.text} only expected at the top level in the query", PARSE_UNEXPECTED_TERM, PropertyValueMap()) + token?.err("Keyword ${token.text} only expected at the top level in the query", ErrorCode.PARSE_UNEXPECTED_TERM) + ?: throw ParserException("Keyword ${token?.text} only expected at the top level in the query", + ErrorCode.PARSE_UNEXPECTED_TERM, PropertyValueMap()) /** * Validates tree to make sure that the top level tokens are not found below the top level. @@ -2914,7 +2979,7 @@ class SqlParser(private val ion: IonSystem) : Parser { node = it, level = level + 1, topLevelTokenSeen = topLevelTokenSeen || isTopLevelType, - dmlListTokenSeen = dmlListTokenSeen || node.type == DML_LIST + dmlListTokenSeen = dmlListTokenSeen || node.type == ParseType.DML_LIST ) } } @@ -2934,9 +2999,9 @@ class SqlParser(private val ion: IonSystem) : Parser { val rem = node.remaining if (!rem.onlyEndOfStatement()) { when (rem.head?.type) { - SEMICOLON -> rem.tail.err("Unexpected token after semicolon. (Only one query is allowed.)", - PARSE_UNEXPECTED_TOKEN) - else -> rem.err("Unexpected token after expression", PARSE_UNEXPECTED_TOKEN) + TokenType.SEMICOLON -> rem.tail.err("Unexpected token after semicolon. 
(Only one query is allowed.)", + ErrorCode.PARSE_UNEXPECTED_TOKEN) + else -> rem.err("Unexpected token after expression", ErrorCode.PARSE_UNEXPECTED_TOKEN) } } diff --git a/lang/src/org/partiql/lang/syntax/Token.kt b/lang/src/org/partiql/lang/syntax/Token.kt index e0d094ed4d..3f88e80bb0 100644 --- a/lang/src/org/partiql/lang/syntax/Token.kt +++ b/lang/src/org/partiql/lang/syntax/Token.kt @@ -15,8 +15,7 @@ package org.partiql.lang.syntax import com.amazon.ion.IonValue -import org.partiql.lang.util.* -import org.partiql.lang.syntax.TokenType.* +import org.partiql.lang.util.stringValue /** * Simple [IonValue] based token for lexing PartiQL. @@ -29,28 +28,28 @@ data class Token(val type: TokenType, val keywordText: String? get() = when (type) { - OPERATOR, KEYWORD, AS, AT -> text - MISSING -> "missing" - NULL -> "null" + TokenType.OPERATOR, TokenType.KEYWORD, TokenType.AS, TokenType.AT -> text + TokenType.MISSING -> "missing" + TokenType.NULL -> "null" else -> null } val isSpecialOperator: Boolean get() = when (type) { - OPERATOR -> text in SPECIAL_OPERATORS + TokenType.OPERATOR -> text in SPECIAL_OPERATORS else -> false } val isBinaryOperator: Boolean get() = when (type) { - OPERATOR, KEYWORD -> text in BINARY_OPERATORS - STAR -> true + TokenType.OPERATOR, TokenType.KEYWORD -> text in BINARY_OPERATORS + TokenType.STAR -> true else -> false } val isUnaryOperator: Boolean get() = when (type){ - OPERATOR, KEYWORD -> text in UNARY_OPERATORS + TokenType.OPERATOR, TokenType.KEYWORD -> text in UNARY_OPERATORS else -> false } diff --git a/lang/src/org/partiql/lang/util/BindingHelpers.kt b/lang/src/org/partiql/lang/util/BindingHelpers.kt index e94f3a607b..1fe8ec3b15 100644 --- a/lang/src/org/partiql/lang/util/BindingHelpers.kt +++ b/lang/src/org/partiql/lang/util/BindingHelpers.kt @@ -14,9 +14,10 @@ package org.partiql.lang.util -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.eval.* +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.eval.err +import org.partiql.lang.eval.BindingCase internal fun errAmbiguousBinding(bindingName: String, matchingNames: List): Nothing { err("Multiple matches were found for the specified identifier", diff --git a/lang/src/org/partiql/lang/util/CollectionExtensions.kt b/lang/src/org/partiql/lang/util/CollectionExtensions.kt index a2a61984d6..7d3f38f7fd 100644 --- a/lang/src/org/partiql/lang/util/CollectionExtensions.kt +++ b/lang/src/org/partiql/lang/util/CollectionExtensions.kt @@ -14,10 +14,9 @@ package org.partiql.lang.util -import com.amazon.ion.* -import org.partiql.lang.eval.* -import java.util.* -import java.util.Collections.* +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.isUnknown /** Returns the first element of the list or `null` if it doesn't exist. */ inline val List.head: T? 
diff --git a/lang/src/org/partiql/lang/util/ExprValueFormatter.kt b/lang/src/org/partiql/lang/util/ExprValueFormatter.kt index 4a3c76f99a..dc83dfbbce 100644 --- a/lang/src/org/partiql/lang/util/ExprValueFormatter.kt +++ b/lang/src/org/partiql/lang/util/ExprValueFormatter.kt @@ -1,12 +1,10 @@ package org.partiql.lang.util -import com.amazon.ion.system.* -import org.partiql.lang.eval.* -import org.partiql.lang.eval.ExprValueType.* +import com.amazon.ion.system.IonTextWriterBuilder +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueType +import org.partiql.lang.eval.name import java.lang.StringBuilder -import java.time.LocalTime -import java.time.OffsetTime -import java.time.format.DateTimeFormatter private const val MISSING_STRING = "MISSING" private const val NULL_STRING = "NULL" @@ -41,25 +39,26 @@ class ConfigurableExprValueFormatter(private val config: Configuration) : ExprVa fun recursivePrettyPrint(value: ExprValue) { when (value.type) { - MISSING -> out.append(MISSING_STRING) - NULL -> out.append(NULL_STRING) + ExprValueType.MISSING -> out.append(MISSING_STRING) + ExprValueType.NULL -> out.append(NULL_STRING) - BOOL -> out.append(value.scalar.booleanValue().toString()) + ExprValueType.BOOL -> out.append(value.scalar.booleanValue().toString()) - INT, DECIMAL -> out.append(value.scalar.numberValue().toString()) + ExprValueType.INT, ExprValueType.DECIMAL -> out.append(value.scalar.numberValue().toString()) - STRING -> out.append("'${value.scalar.stringValue()}'") + ExprValueType.STRING -> out.append("'${value.scalar.stringValue()}'") - DATE -> out.append(value.scalar.dateValue().toString()) + ExprValueType.DATE -> out.append(value.scalar.dateValue().toString()) - TIME -> out.append(value.scalar.timeValue().toString()) + ExprValueType.TIME -> out.append(value.scalar.timeValue().toString()) // fallback to an Ion literal for all types that don't have a native PartiQL representation - FLOAT, TIMESTAMP, SYMBOL, CLOB, BLOB, SEXP -> prettyPrintIonLiteral(value) + ExprValueType.FLOAT, ExprValueType.TIMESTAMP, ExprValueType.SYMBOL, + ExprValueType.CLOB,ExprValueType. 
BLOB, ExprValueType.SEXP -> prettyPrintIonLiteral(value) - LIST -> prettyPrintContainer(value, "[", "]") - BAG -> prettyPrintContainer(value, "<<", ">>") - STRUCT -> prettyPrintContainer(value, "{", "}") { v -> + ExprValueType.LIST -> prettyPrintContainer(value, "[", "]") + ExprValueType.BAG -> prettyPrintContainer(value, "<<", ">>") + ExprValueType.STRUCT -> prettyPrintContainer(value, "{", "}") { v -> val fieldName = v.name!!.scalar.stringValue() out.append("'$fieldName': ") diff --git a/lang/src/org/partiql/lang/util/IonValueExtensions.kt b/lang/src/org/partiql/lang/util/IonValueExtensions.kt index 17bff26aed..423c6c983b 100644 --- a/lang/src/org/partiql/lang/util/IonValueExtensions.kt +++ b/lang/src/org/partiql/lang/util/IonValueExtensions.kt @@ -14,9 +14,23 @@ package org.partiql.lang.util -import com.amazon.ion.* -import org.partiql.lang.eval.* -import java.math.* +import com.amazon.ion.IntegerSize +import com.amazon.ion.IonBool +import com.amazon.ion.IonContainer +import com.amazon.ion.IonDecimal +import com.amazon.ion.IonFloat +import com.amazon.ion.IonInt +import com.amazon.ion.IonLob +import com.amazon.ion.IonSequence +import com.amazon.ion.IonSexp +import com.amazon.ion.IonStruct +import com.amazon.ion.IonSymbol +import com.amazon.ion.IonText +import com.amazon.ion.IonTimestamp +import com.amazon.ion.IonValue +import com.amazon.ion.Timestamp +import java.math.BigDecimal +import java.math.BigInteger @JvmName("IonValueUtils") @@ -65,7 +79,7 @@ fun IonValue.numberValue(): Number = when { isNullValue -> err("Expected non-null number: $this") else -> when (this) { is IonInt -> javaValue() - is IonFloat -> doubleValue() + is IonFloat -> doubleValue() is IonDecimal -> decimalValue() else -> err("Expected number: $this") } diff --git a/lang/src/org/partiql/lang/util/IonWriterContext.kt b/lang/src/org/partiql/lang/util/IonWriterContext.kt index f59aa664da..8ef8cc988b 100644 --- a/lang/src/org/partiql/lang/util/IonWriterContext.kt +++ b/lang/src/org/partiql/lang/util/IonWriterContext.kt @@ -14,7 +14,9 @@ package org.partiql.lang.util -import com.amazon.ion.* +import com.amazon.ion.IonType +import com.amazon.ion.IonValue +import com.amazon.ion.IonWriter /** * A simple wrapper for writing Ion from Kotlin. diff --git a/lang/src/org/partiql/lang/util/NumberExtensions.kt b/lang/src/org/partiql/lang/util/NumberExtensions.kt index 220e1ae6d1..6d75503614 100644 --- a/lang/src/org/partiql/lang/util/NumberExtensions.kt +++ b/lang/src/org/partiql/lang/util/NumberExtensions.kt @@ -14,9 +14,14 @@ package org.partiql.lang.util -import com.amazon.ion.* -import org.partiql.lang.eval.* -import java.math.* +import com.amazon.ion.Decimal +import com.amazon.ion.IonSystem +import com.amazon.ion.IonValue +import org.partiql.lang.eval.errIntOverflow +import java.math.BigDecimal +import java.math.BigInteger +import java.math.MathContext +import java.math.RoundingMode private val MATH_CONTEXT = MathContext(38, RoundingMode.HALF_EVEN) // TODO should this be configurable? 
@@ -25,7 +30,7 @@ private val MATH_CONTEXT = MathContext(38, RoundingMode.HALF_EVEN) // TODO shoul * and factory methods */ internal fun bigDecimalOf(num: Number, mc: MathContext = MATH_CONTEXT): BigDecimal = when (num) { - is Decimal -> num + is Decimal -> num is Int -> BigDecimal(num, mc) is Long -> BigDecimal(num, mc) is Double -> BigDecimal(num, mc) diff --git a/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt b/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt index b0c999b856..79eb783633 100644 --- a/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt +++ b/lang/src/org/partiql/lang/util/PropertyMapHelpers.kt @@ -14,9 +14,10 @@ package org.partiql.lang.util -import com.amazon.ion.* -import org.partiql.lang.errors.* -import org.partiql.lang.syntax.* +import com.amazon.ion.IonValue +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.syntax.TokenType /** * Helper function to reduce the syntactical overhead of creating a [PropertyValueMap]. @@ -30,7 +31,7 @@ fun propertyValueMapOf(vararg properties: Pair): PropertyValueMap is Long -> pvm[it.first] = it.second as Long is String -> pvm[it.first] = it.second as String is TokenType -> pvm[it.first] = it.second as TokenType - is IonValue -> pvm[it.first] = it.second as IonValue + is IonValue -> pvm[it.first] = it.second as IonValue is Enum<*> -> pvm[it.first] = it.second.toString() else -> throw IllegalArgumentException("Cannot convert ${it.second.javaClass.name} to PropertyValue") } diff --git a/lang/src/org/partiql/lang/util/TokenListExtensions.kt b/lang/src/org/partiql/lang/util/TokenListExtensions.kt index 56e2672298..b9ddd1664a 100644 --- a/lang/src/org/partiql/lang/util/TokenListExtensions.kt +++ b/lang/src/org/partiql/lang/util/TokenListExtensions.kt @@ -17,7 +17,11 @@ package org.partiql.lang.util import org.partiql.lang.errors.ErrorCode import org.partiql.lang.errors.Property import org.partiql.lang.errors.PropertyValueMap -import org.partiql.lang.syntax.* +import org.partiql.lang.syntax.ParserException +import org.partiql.lang.syntax.SourceSpan +import org.partiql.lang.syntax.SqlParser +import org.partiql.lang.syntax.Token +import org.partiql.lang.syntax.TokenType /** * Predicate to check if the list of [Token]'s only contains a end of statement with diff --git a/lang/test/org/partiql/lang/TestBase.kt b/lang/test/org/partiql/lang/TestBase.kt index b7a5284d4e..66f460ed43 100644 --- a/lang/test/org/partiql/lang/TestBase.kt +++ b/lang/test/org/partiql/lang/TestBase.kt @@ -14,22 +14,34 @@ package org.partiql.lang -import com.amazon.ion.* +import com.amazon.ion.Decimal +import com.amazon.ion.IonSystem +import com.amazon.ion.IonValue +import com.amazon.ion.Timestamp import com.amazon.ion.system.IonSystemBuilder -import org.assertj.core.api.* -import org.partiql.lang.ast.* -import org.partiql.lang.eval.* -import org.partiql.lang.errors.* -import org.partiql.lang.util.* import org.junit.Assert import org.junit.runner.RunWith -import java.util.* import junitparams.JUnitParamsRunner +import org.assertj.core.api.SoftAssertions +import org.partiql.lang.ast.ExprNode import org.partiql.lang.ast.passes.AstRewriterBase +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.errors.Property +import org.partiql.lang.errors.PropertyType +import org.partiql.lang.errors.PropertyValue +import org.partiql.lang.errors.PropertyValueMap +import org.partiql.lang.eval.BindingCase +import org.partiql.lang.eval.BindingName +import org.partiql.lang.eval.Bindings +import 
org.partiql.lang.eval.EvaluationException +import org.partiql.lang.eval.ExprValue +import org.partiql.lang.eval.ExprValueFactory import org.partiql.lang.eval.time.Time +import org.partiql.lang.util.SexpAstPrettyPrinter +import org.partiql.lang.util.softAssert import java.math.BigDecimal import java.time.LocalDate -import kotlin.reflect.* +import kotlin.reflect.KClass @RunWith(JUnitParamsRunner::class) @@ -42,7 +54,7 @@ abstract class TestBase : Assert() { protected fun anyToExprValue(value: Any) = when (value) { is String -> valueFactory.newString(value) is Int -> valueFactory.newInt(value) - is Decimal -> valueFactory.newDecimal(value) + is Decimal -> valueFactory.newDecimal(value) is Timestamp -> valueFactory.newTimestamp(value) is LocalDate -> valueFactory.newDate(value) is Time -> valueFactory.newTime(value) diff --git a/lang/test/org/partiql/lang/ast/AstNodeTest.kt b/lang/test/org/partiql/lang/ast/AstNodeTest.kt index 434a0932b9..9ff6222548 100644 --- a/lang/test/org/partiql/lang/ast/AstNodeTest.kt +++ b/lang/test/org/partiql/lang/ast/AstNodeTest.kt @@ -1,12 +1,14 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import com.amazon.ion.system.* -import junitparams.* -import org.junit.* -import org.junit.Assert.* -import org.junit.runner.* -import org.partiql.lang.syntax.* +import com.amazon.ion.IonSystem +import com.amazon.ion.system.IonSystemBuilder +import junitparams.JUnitParamsRunner +import junitparams.Parameters +import org.junit.Assert.assertTrue +import org.junit.Assert.assertEquals +import org.junit.Test +import org.junit.runner.RunWith +import org.partiql.lang.syntax.SqlParser // TODO: add tests for DDL & DML ExprNodes. diff --git a/lang/test/org/partiql/lang/ast/AstSerDeTests.kt b/lang/test/org/partiql/lang/ast/AstSerDeTests.kt index 192f05ec45..9c47b3fca1 100644 --- a/lang/test/org/partiql/lang/ast/AstSerDeTests.kt +++ b/lang/test/org/partiql/lang/ast/AstSerDeTests.kt @@ -14,9 +14,9 @@ package org.partiql.lang.ast -import org.partiql.lang.* -import junitparams.* -import org.junit.* +import junitparams.Parameters +import org.junit.Test +import org.partiql.lang.TestBase /** * This class contains tests for (de)serialization of metas. diff --git a/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt b/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt index a6cf04235c..5277a7ced7 100644 --- a/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt +++ b/lang/test/org/partiql/lang/ast/IsIonLiteralMetaTest.kt @@ -14,8 +14,9 @@ package org.partiql.lang.ast * language governing permissions and limitations under the License. 
*/ -import com.amazon.ion.system.* -import org.junit.* +import com.amazon.ion.system.IonSystemBuilder +import org.junit.Assert +import org.junit.Test import org.partiql.lang.syntax.SqlParser class IsIonLiteralMetaTest { diff --git a/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt b/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt index 7f93051229..5c6872276b 100644 --- a/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt +++ b/lang/test/org/partiql/lang/ast/PathComponentExprTest.kt @@ -14,12 +14,16 @@ package org.partiql.lang.ast -import com.amazon.ion.* -import com.amazon.ion.system.* -import junitparams.* +import com.amazon.ion.IonSystem +import com.amazon.ion.system.IonSystemBuilder +import junitparams.JUnitParamsRunner +import junitparams.Parameters import org.junit.Test -import org.junit.runner.* -import kotlin.test.* +import org.junit.runner.RunWith +import kotlin.test.assertEquals +import kotlin.test.assertFalse +import kotlin.test.assertNotEquals +import kotlin.test.assertTrue @RunWith(JUnitParamsRunner::class) class PathComponentExprTest { diff --git a/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt b/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt index f82c63bff7..16cb03dc1b 100644 --- a/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt +++ b/lang/test/org/partiql/lang/ast/SourceLocationMetaTest.kt @@ -14,10 +14,9 @@ package org.partiql.lang.ast -import com.amazon.ion.system.* -import org.junit.* +import com.amazon.ion.system.IonSystemBuilder import org.junit.Test -import kotlin.test.* +import kotlin.test.assertEquals class SourceLocationMetaTest { @Test diff --git a/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt b/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt index 2b9722c9e6..379608a502 100644 --- a/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt +++ b/lang/test/org/partiql/lang/ast/VariableReferenceTest.kt @@ -14,9 +14,9 @@ package org.partiql.lang.ast -import org.junit.* +import org.junit.Assert import org.junit.Test -import kotlin.test.* +import kotlin.test.assertEquals class VariableReferenceTest { diff --git a/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt b/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt index 3fd10efe9b..3fb11fffed 100644 --- a/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt +++ b/lang/test/org/partiql/lang/ast/passes/AstWalkerTests.kt @@ -14,14 +14,21 @@ package org.partiql.lang.ast.passes -import com.amazon.ion.system.* -import org.partiql.lang.ast.* -import org.partiql.lang.syntax.* -import junitparams.* -import org.junit.* +import com.amazon.ion.system.IonSystemBuilder + +import junitparams.JUnitParamsRunner +import junitparams.Parameters import org.junit.Test -import org.junit.runner.* -import kotlin.test.* +import org.junit.runner.RunWith +import org.partiql.lang.ast.DataManipulationOperation +import org.partiql.lang.ast.DataType +import org.partiql.lang.ast.ExprNode +import org.partiql.lang.ast.FromSource +import org.partiql.lang.ast.PathComponent +import org.partiql.lang.ast.SelectListItem +import org.partiql.lang.ast.SelectProjection +import org.partiql.lang.syntax.SqlParser +import kotlin.test.assertEquals /** * [AstWalker] simply traverses each node in the [ExprNode] instance but performs no transformations. 
diff --git a/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt b/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt index 851f591426..313d251204 100644 --- a/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt +++ b/lang/test/org/partiql/lang/ast/passes/RewriterTestBase.kt @@ -14,8 +14,7 @@ package org.partiql.lang.ast.passes -import org.partiql.lang.syntax.* - +import org.partiql.lang.syntax.SqlParserTestBase @Deprecated("New rewriters should implement PIG's PartiqlAst.VisitorTransform and use VisitorTransformTestBase to test") abstract class RewriterTestBase : SqlParserTestBase() { diff --git a/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt b/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt index 577af1f9df..15ba20d685 100644 --- a/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt +++ b/lang/test/org/partiql/lang/errors/LexerErrorsTest.kt @@ -14,10 +14,11 @@ package org.partiql.lang.errors -import org.partiql.lang.* -import org.partiql.lang.syntax.* -import org.partiql.lang.util.* import org.junit.Test +import org.partiql.lang.TestBase +import org.partiql.lang.syntax.LexerException +import org.partiql.lang.syntax.SqlLexer +import org.partiql.lang.util.softAssert class LexerErrorsTest : TestBase() { diff --git a/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt b/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt index 3d278d3047..52583bb2b5 100644 --- a/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt +++ b/lang/test/org/partiql/lang/errors/ParserErrorsTest.kt @@ -14,9 +14,9 @@ package org.partiql.lang.errors import com.amazon.ion.Timestamp +import org.junit.Ignore import org.partiql.lang.syntax.TokenType -import org.partiql.lang.util.* -import org.junit.* +import org.junit.Test import org.partiql.lang.syntax.SqlParserTestBase class ParserErrorsTest : SqlParserTestBase() { diff --git a/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt b/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt index 073a8fbf8d..274a1d475a 100644 --- a/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt +++ b/lang/test/org/partiql/lang/errors/PropertyValueMapTest.kt @@ -14,11 +14,10 @@ package org.partiql.lang.errors -import org.partiql.lang.* -import org.partiql.lang.errors.Property.* import org.partiql.lang.syntax.TokenType import org.junit.Before import org.junit.Test +import org.partiql.lang.TestBase class PropertyValueMapTest : TestBase() { @@ -29,29 +28,29 @@ class PropertyValueMapTest : TestBase() { @Before fun setUp() { - onlyColumnValueMap[COLUMN_NUMBER] = 11L - oneOfEachType[EXPECTED_TOKEN_TYPE] = TokenType.COMMA - oneOfEachType[KEYWORD] = "test" - oneOfEachType[EXPECTED_ARITY_MAX] = 1 - oneOfEachType[TOKEN_VALUE] = ion.newEmptyList() - oneOfEachType[COLUMN_NUMBER] = 11L + onlyColumnValueMap[Property.COLUMN_NUMBER] = 11L + oneOfEachType[Property.EXPECTED_TOKEN_TYPE] = TokenType.COMMA + oneOfEachType[Property.KEYWORD] = "test" + oneOfEachType[Property.EXPECTED_ARITY_MAX] = 1 + oneOfEachType[Property.TOKEN_VALUE] = ion.newEmptyList() + oneOfEachType[Property.COLUMN_NUMBER] = 11L } @Test fun getPropFromEmptyBag() { - assertNull(emptyValueMap[LINE_NUMBER]) + assertNull(emptyValueMap[Property.LINE_NUMBER]) } @Test fun getAbsentPropFromNonEmptyBag() { - assertNull(onlyColumnValueMap[LINE_NUMBER]) + assertNull(onlyColumnValueMap[Property.LINE_NUMBER]) } @Test fun getValues() { - assertEquals(11L, oneOfEachType[COLUMN_NUMBER]?.longValue()) - assertEquals(TokenType.COMMA, oneOfEachType[EXPECTED_TOKEN_TYPE]?.tokenTypeValue()) - 
assertEquals("test", oneOfEachType[KEYWORD]?.stringValue()) - assertEquals(1, oneOfEachType[EXPECTED_ARITY_MAX]?.integerValue()) - assertEquals(11L, oneOfEachType[COLUMN_NUMBER]?.longValue()) + assertEquals(11L, oneOfEachType[Property.COLUMN_NUMBER]?.longValue()) + assertEquals(TokenType.COMMA, oneOfEachType[Property.EXPECTED_TOKEN_TYPE]?.tokenTypeValue()) + assertEquals("test", oneOfEachType[Property.KEYWORD]?.stringValue()) + assertEquals(1, oneOfEachType[Property.EXPECTED_ARITY_MAX]?.integerValue()) + assertEquals(11L, oneOfEachType[Property.COLUMN_NUMBER]?.longValue()) } } \ No newline at end of file diff --git a/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt b/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt index 9e5dbb1f40..066d871ad9 100644 --- a/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt +++ b/lang/test/org/partiql/lang/errors/SqlExceptionTest.kt @@ -14,10 +14,9 @@ package org.partiql.lang.errors -import org.partiql.lang.* -import org.partiql.lang.errors.Property.* import org.junit.Test - +import org.partiql.lang.SqlException +import org.partiql.lang.TestBase class SqlExceptionTest : TestBase() { @@ -33,9 +32,9 @@ class SqlExceptionTest : TestBase() { @Test fun noErrorMessageErrorCodeContext() { val errorContext = PropertyValueMap() - errorContext[COLUMN_NUMBER] = 10L - errorContext[LINE_NUMBER] = 20L - errorContext[TOKEN_STRING] = "c" + errorContext[Property.COLUMN_NUMBER] = 10L + errorContext[Property.LINE_NUMBER] = 20L + errorContext[Property.TOKEN_STRING] = "c" val ex = SqlException(ErrorCode.LEXER_INVALID_CHAR, errorContext) @@ -45,9 +44,9 @@ class SqlExceptionTest : TestBase() { @Test fun customErrorMessageErrorCodeContext() { val errorContext = PropertyValueMap() - errorContext[COLUMN_NUMBER] = 10L - errorContext[LINE_NUMBER] = 20L - errorContext[TOKEN_STRING] = "c" + errorContext[Property.COLUMN_NUMBER] = 10L + errorContext[Property.LINE_NUMBER] = 20L + errorContext[Property.TOKEN_STRING] = "c" val ex = SqlException("Unexpected token", ErrorCode.LEXER_INVALID_CHAR, errorContext) @@ -57,9 +56,9 @@ class SqlExceptionTest : TestBase() { @Test fun toStringDoesNotAccumulateMessageText() { val errorContext = PropertyValueMap() - errorContext[COLUMN_NUMBER] = 10L - errorContext[LINE_NUMBER] = 20L - errorContext[TOKEN_STRING] = "c" + errorContext[Property.COLUMN_NUMBER] = 10L + errorContext[Property.LINE_NUMBER] = 20L + errorContext[Property.TOKEN_STRING] = "c" val ex = SqlException("Unexpected token", ErrorCode.LEXER_INVALID_CHAR, errorContext) diff --git a/lang/test/org/partiql/lang/eval/BindingsTest.kt b/lang/test/org/partiql/lang/eval/BindingsTest.kt index d1455b61b6..9fbd84bdea 100644 --- a/lang/test/org/partiql/lang/eval/BindingsTest.kt +++ b/lang/test/org/partiql/lang/eval/BindingsTest.kt @@ -14,10 +14,10 @@ package org.partiql.lang.eval -import org.junit.* -import org.partiql.lang.* -import org.partiql.lang.errors.* -import org.partiql.lang.util.* +import org.junit.Test +import org.partiql.lang.TestBase +import org.partiql.lang.errors.ErrorCode +import org.partiql.lang.util.newFromIonText class BindingsTest : TestBase() { diff --git a/lang/test/org/partiql/lang/eval/CompileOptionsTest.kt b/lang/test/org/partiql/lang/eval/CompileOptionsTest.kt index a08b5a09d5..bfadbe8a01 100644 --- a/lang/test/org/partiql/lang/eval/CompileOptionsTest.kt +++ b/lang/test/org/partiql/lang/eval/CompileOptionsTest.kt @@ -14,8 +14,8 @@ package org.partiql.lang.eval -import org.junit.* -import org.junit.Assert.* +import org.junit.Assert.assertEquals +import org.junit.Test 
class CompileOptionsTest { private fun assertDefault(actual: CompileOptions) { diff --git a/lang/test/org/partiql/lang/eval/EvaluatingCompilerCastTest.kt b/lang/test/org/partiql/lang/eval/EvaluatingCompilerCastTest.kt index 5ed514c68e..eef24d3cf3 100644 --- a/lang/test/org/partiql/lang/eval/EvaluatingCompilerCastTest.kt +++ b/lang/test/org/partiql/lang/eval/EvaluatingCompilerCastTest.kt @@ -14,12 +14,10 @@ package org.partiql.lang.eval -import org.partiql.lang.errors.* -import org.partiql.lang.eval.ExprValueType.* -import org.partiql.lang.errors.ErrorCode.* import junitparams.Parameters import junitparams.naming.TestCaseName import org.junit.Test +import org.partiql.lang.errors.ErrorCode import org.partiql.lang.syntax.ParserException import org.partiql.lang.util.getOffsetHHmm import java.time.ZoneOffset @@ -72,11 +70,11 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { * is to be expected. */ data class CastCase( - val source: String, - val type: String, - val expected: String?, - val expectedErrorCode: ErrorCode?, - val session: EvaluationSession = EvaluationSession.standard() + val source: String, + val type: String, + val expected: String?, + val expectedErrorCode: ErrorCode?, + val session: EvaluationSession = EvaluationSession.standard() ) { val expression = "CAST($source AS $type)" override fun toString(): String = expression @@ -122,40 +120,40 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("`0e0`", "false"), case("1.1", "true"), // timestamp - case("`2007-10-10T`", EVALUATOR_INVALID_CAST), + case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST), // text - case("'hello'", EVALUATOR_CAST_FAILED), + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), case("'TrUe'", "true"), case("""`"FALSE"`""", "false"), case("""`'true'`""", "true"), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"goodbye"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"false"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"true"}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), - case("`{{Z29vZGJ5ZQ==}}`", EVALUATOR_INVALID_CAST), // goodbye - case("`{{ZmFsc2U=}}`", EVALUATOR_INVALID_CAST), // false - case("`{{dHJ1ZQ==}}`", EVALUATOR_INVALID_CAST), // true + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"goodbye"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"false"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"true"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{Z29vZGJ5ZQ==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // goodbye + case("`{{ZmFsc2U=}}`", ErrorCode.EVALUATOR_INVALID_CAST), // false + case("`{{dHJ1ZQ==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // true // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("`[true]`", EVALUATOR_INVALID_CAST), - case("`[false]`", EVALUATOR_INVALID_CAST), - case("`[true, false]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[true]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[false]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[true, false]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), - case("`(true)`", EVALUATOR_INVALID_CAST), - case("`(false)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(true)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(false)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - 
case("`{a:true}`", EVALUATOR_INVALID_CAST), - case("{'b':true}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:true}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':true}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<>", EVALUATOR_INVALID_CAST), - case("<>", EVALUATOR_INVALID_CAST) - ).types(BOOL.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.BOOL.sqlTextNames), listOf( // booleans case("TRUE AND FALSE", "0"), @@ -169,10 +167,10 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("-20.1", "-20"), case("-20.9", "-20"), // timestamp - case("`2007-10-10T`", EVALUATOR_INVALID_CAST), + case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST), // text - case("'hello'", EVALUATOR_CAST_FAILED), - case("'1234A'", EVALUATOR_CAST_FAILED), // Invalid ION value + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), + case("'1234A'", ErrorCode.EVALUATOR_CAST_FAILED), // Invalid ION value case("'20'", "20"), case("'020'", "20"), case("'+20'", "20"), @@ -200,37 +198,37 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("'-0b10'", "-2"), case("'-0b010'", "-2"), case("""`"1000"`""", "1000"), - case("""`'2e100'`""", EVALUATOR_CAST_FAILED), - case("""`'2d100'`""", EVALUATOR_CAST_FAILED), - case("'00xA'", EVALUATOR_CAST_FAILED), - case("'00b10'", EVALUATOR_CAST_FAILED), + case("""`'2e100'`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`'2d100'`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("'00xA'", ErrorCode.EVALUATOR_CAST_FAILED), + case("'00b10'", ErrorCode.EVALUATOR_CAST_FAILED), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10 + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("`[1]`", EVALUATOR_INVALID_CAST), - case("`[-2, 0]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[1]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[-2, 0]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), - case("`(1)`", EVALUATOR_INVALID_CAST), - case("`(0)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(1)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(0)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - case("`{a:12}`", EVALUATOR_INVALID_CAST), - case("{'b':-4}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:12}`", 
ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':-4}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<<14>>", EVALUATOR_INVALID_CAST), - case("<<20>>", EVALUATOR_INVALID_CAST) - ).types(INT.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<14>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<20>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.INT.sqlTextNames), listOf( // booleans case("TRUE AND FALSE", "0e0"), @@ -242,40 +240,40 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("1.1", "1.1e0"), case("-20.1", "-20.1e0"), // timestamp - case("`2007-10-10T`", EVALUATOR_INVALID_CAST), + case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST), // text - case("'hello'", EVALUATOR_CAST_FAILED), + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), case("'-20'", "-20e0"), case("""`"1000"`""", "1000e0"), case("""`'2e100'`""", "2e100"), - case("""`'2d100'`""", EVALUATOR_CAST_FAILED), + case("""`'2d100'`""", ErrorCode.EVALUATOR_CAST_FAILED), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10 + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("`[1e0]`", EVALUATOR_INVALID_CAST), - case("`[-2e0, 0e0]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[1e0]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[-2e0, 0e0]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), - case("`(1e0)`", EVALUATOR_INVALID_CAST), - case("`(0e0)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(1e0)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(0e0)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - case("`{a:12e0}`", EVALUATOR_INVALID_CAST), - case("{'b':`-4e0`}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:12e0}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':`-4e0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<<`14e0`>>", EVALUATOR_INVALID_CAST), - case("<<`20e0`>>", EVALUATOR_INVALID_CAST) - ).types(FLOAT.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`14e0`>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`20e0`>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.FLOAT.sqlTextNames), listOf( // booleans case("TRUE AND FALSE", "0d0"), @@ -288,49 +286,49 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("1.1", "1.1d0"), case("-20.1", "-20.1d0"), // timestamp - case("`2007-10-10T`", EVALUATOR_INVALID_CAST), + 
case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST), // text - case("'hello'", EVALUATOR_CAST_FAILED), + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), case("'-20'", "-20d0"), case("""`"1000"`""", "1000d0"), case("""`'2e100'`""", "2d100"), - case("""`'2d100'`""", EVALUATOR_CAST_FAILED), + case("""`'2d100'`""", ErrorCode.EVALUATOR_CAST_FAILED), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10 + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("`[1d0]`", EVALUATOR_INVALID_CAST), - case("`[-2d0, 0d0]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[1d0]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[-2d0, 0d0]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), - case("`(1d0)`", EVALUATOR_INVALID_CAST), - case("`(0d0)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(1d0)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(0d0)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - case("`{a:12d0}`", EVALUATOR_INVALID_CAST), - case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<<`14d0`>>", EVALUATOR_INVALID_CAST), - case("<<`20d0`>>", EVALUATOR_INVALID_CAST) - ).types(DECIMAL.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`14d0`>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`20d0`>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.DECIMAL.sqlTextNames), listOf( // booleans - case("TRUE AND FALSE", EVALUATOR_INVALID_CAST), - case("`true`", EVALUATOR_INVALID_CAST), + case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST), + case("`true`", ErrorCode.EVALUATOR_INVALID_CAST), // numbers - case("5", EVALUATOR_INVALID_CAST), - case("`0e0`", EVALUATOR_INVALID_CAST), - case("1.1", EVALUATOR_INVALID_CAST), - case("-20.1", EVALUATOR_INVALID_CAST), + case("5", ErrorCode.EVALUATOR_INVALID_CAST), + case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST), + case("1.1", ErrorCode.EVALUATOR_INVALID_CAST), + case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST), // timestamp case("`2007-10-10T`", "\$partiql_date::2007-10-10"), case("`2007-02-23T12:14Z`", "\$partiql_date::2007-02-23"), @@ -339,35 +337,35 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("`2007-02T`", "\$partiql_date::2007-02-01"), case("`2007T`", "\$partiql_date::2007-01-01"), // text - case("'hello'", EVALUATOR_CAST_FAILED), 
- case("'2016-03-01T01:12:12Z'", EVALUATOR_CAST_FAILED), + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), + case("'2016-03-01T01:12:12Z'", ErrorCode.EVALUATOR_CAST_FAILED), case("""`"2001-01-01"`""", "\$partiql_date::2001-01-01"), - case("""`"+20212-02-01"`""", EVALUATOR_CAST_FAILED), - case("""`"20212-02-01"`""", EVALUATOR_CAST_FAILED), - case("""`'2000T'`""", EVALUATOR_CAST_FAILED), - case("""`'1999-04T'`""", EVALUATOR_CAST_FAILED), + case("""`"+20212-02-01"`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`"20212-02-01"`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`'2000T'`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`'1999-04T'`""", ErrorCode.EVALUATOR_CAST_FAILED), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), // list - case("`[]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST) - ).types(DATE.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.DATE.sqlTextNames), // Find more coverage for the "Cast as Time" tests in `castDateAndTime`. listOf( // booleans - case("TRUE AND FALSE", EVALUATOR_INVALID_CAST), - case("`true`", EVALUATOR_INVALID_CAST), + case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST), + case("`true`", ErrorCode.EVALUATOR_INVALID_CAST), // numbers - case("5", EVALUATOR_INVALID_CAST), - case("`0e0`", EVALUATOR_INVALID_CAST), - case("1.1", EVALUATOR_INVALID_CAST), - case("-20.1", EVALUATOR_INVALID_CAST), + case("5", ErrorCode.EVALUATOR_INVALID_CAST), + case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST), + case("1.1", ErrorCode.EVALUATOR_INVALID_CAST), + case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST), // timestamp case("`2007-10-10T`", "\$partiql_time::{hour:0,minute:0,second:0.,timezone_hour:null.int,timezone_minute:null.int}"), case("`2007-02-23T12:14Z`", "\$partiql_time::{hour:12,minute:14,second:0.,timezone_hour:null.int,timezone_minute:null.int}"), @@ -376,54 +374,54 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("`2007-02T`", "\$partiql_time::{hour:0,minute:0,second:0.,timezone_hour:null.int,timezone_minute:null.int}"), case("`2007T`", "\$partiql_time::{hour:0,minute:0,second:0.,timezone_hour:null.int,timezone_minute:null.int}"), // text - case("'hello'", EVALUATOR_CAST_FAILED), - case("'2016-03-01T01:12:12Z'", EVALUATOR_CAST_FAILED), - case("""`"23:2:12.12345"`""", EVALUATOR_CAST_FAILED), - case("""`"+20212-02-01"`""", EVALUATOR_CAST_FAILED), - case("""`"20212-02-01"`""", EVALUATOR_CAST_FAILED), - case("""`'2000T'`""", EVALUATOR_CAST_FAILED), - case("""`'1999-04T'`""", EVALUATOR_CAST_FAILED), + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), + case("'2016-03-01T01:12:12Z'", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`"23:2:12.12345"`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`"+20212-02-01"`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`"20212-02-01"`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`'2000T'`""", ErrorCode.EVALUATOR_CAST_FAILED), + case("""`'1999-04T'`""", ErrorCode.EVALUATOR_CAST_FAILED), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + 
case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), // list - case("`[]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST) - ).types(TIME.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.TIME.sqlTextNames), listOf( // booleans - case("TRUE AND FALSE", EVALUATOR_INVALID_CAST), - case("`true`", EVALUATOR_INVALID_CAST), + case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST), + case("`true`", ErrorCode.EVALUATOR_INVALID_CAST), // numbers - case("5", EVALUATOR_INVALID_CAST), - case("`0e0`", EVALUATOR_INVALID_CAST), - case("1.1", EVALUATOR_INVALID_CAST), - case("-20.1", EVALUATOR_INVALID_CAST), + case("5", ErrorCode.EVALUATOR_INVALID_CAST), + case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST), + case("1.1", ErrorCode.EVALUATOR_INVALID_CAST), + case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST), // timestamp case("`2007-10-10T`", "2007-10-10T"), // text - case("'hello'", EVALUATOR_CAST_FAILED), + case("'hello'", ErrorCode.EVALUATOR_CAST_FAILED), case("'2016-03-01T01:12:12Z'", "2016-03-01T01:12:12Z"), case("""`"2001-01-01"`""", "2001-01-01T"), case("""`'2000T'`""", "2000T"), case("""`'1999-04T'`""", "1999-04T"), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), // list - case("`[]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST) - ).types(TIMESTAMP.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.TIMESTAMP.sqlTextNames), listOf( // booleans case("TRUE AND FALSE", "'false'"), @@ -442,32 +440,32 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("""`'2e100'`""", "'2e100'"), case("""`'2d100'`""", "'2d100'"), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10 + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("['hello']", EVALUATOR_INVALID_CAST), - case("`[-2d0, 0d0]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[-2d0, 0d0]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", 
EVALUATOR_INVALID_CAST), - case("`(1d0)`", EVALUATOR_INVALID_CAST), - case("`(0d0)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(1d0)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(0d0)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - case("`{a:12d0}`", EVALUATOR_INVALID_CAST), - case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<<`14d0`>>", EVALUATOR_INVALID_CAST), - case("<<`20d0`>>", EVALUATOR_INVALID_CAST) - ).types(SYMBOL.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`14d0`>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`20d0`>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.SYMBOL.sqlTextNames), listOf( // booleans case("TRUE AND FALSE", "\"false\""), @@ -486,50 +484,50 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("""`'2e100'`""", "\"2e100\""), case("""`'2d100'`""", "\"2d100\""), // lob - case("""`{{""}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST), - case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST), - case("`{{}}`", EVALUATOR_INVALID_CAST), - case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0 - case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0 - case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10 + case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0 + case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0 + case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10 // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("['hello']", EVALUATOR_INVALID_CAST), - case("`[-2d0, 0d0]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[-2d0, 0d0]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), - case("`(1d0)`", EVALUATOR_INVALID_CAST), - case("`(0d0)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(1d0)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(0d0)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - case("`{a:12d0}`", EVALUATOR_INVALID_CAST), - case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<<`14d0`>>", EVALUATOR_INVALID_CAST), - case("<<'a', <<'hello'>>>>", EVALUATOR_INVALID_CAST), - case("<<`20d0`>>", EVALUATOR_INVALID_CAST) - ).types(STRING.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`14d0`>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<'a', <<'hello'>>>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`20d0`>>", 
ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.STRING.sqlTextNames), listOf( // booleans - case("TRUE AND FALSE", EVALUATOR_INVALID_CAST), - case("`true`", EVALUATOR_INVALID_CAST), + case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST), + case("`true`", ErrorCode.EVALUATOR_INVALID_CAST), // numbers - case("5", EVALUATOR_INVALID_CAST), - case("`0e0`", EVALUATOR_INVALID_CAST), - case("1.1", EVALUATOR_INVALID_CAST), - case("-20.1", EVALUATOR_INVALID_CAST), + case("5", ErrorCode.EVALUATOR_INVALID_CAST), + case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST), + case("1.1", ErrorCode.EVALUATOR_INVALID_CAST), + case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST), // timestamp - case("`2007-10-10T`", EVALUATOR_INVALID_CAST), + case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST), // text - case("'hello'", EVALUATOR_INVALID_CAST), - case("'-20'", EVALUATOR_INVALID_CAST), - case("""`"1000"`""", EVALUATOR_INVALID_CAST), - case("""`'2e100'`""", EVALUATOR_INVALID_CAST), - case("""`'2d100'`""", EVALUATOR_INVALID_CAST), + case("'hello'", ErrorCode.EVALUATOR_INVALID_CAST), + case("'-20'", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`"1000"`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`'2e100'`""", ErrorCode.EVALUATOR_INVALID_CAST), + case("""`'2d100'`""", ErrorCode.EVALUATOR_INVALID_CAST), // lob case("""`{{""}}`""", """{{""}}"""), case("""`{{"0"}}`""", """{{"0"}}"""), @@ -540,40 +538,40 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() { case("`{{MS4w}}`", """{{"1.0"}}"""), case("`{{MmUxMA==}}`", """{{"2e10"}}"""), // list - case("`[]`", EVALUATOR_INVALID_CAST), - case("['hello']", EVALUATOR_INVALID_CAST), - case("`[-2d0, 0d0]`", EVALUATOR_INVALID_CAST), + case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST), + case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST), + case("`[-2d0, 0d0]`", ErrorCode.EVALUATOR_INVALID_CAST), // sexp - case("`()`", EVALUATOR_INVALID_CAST), - case("`(1d0)`", EVALUATOR_INVALID_CAST), - case("`(0d0)`", EVALUATOR_INVALID_CAST), + case("`()`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(1d0)`", ErrorCode.EVALUATOR_INVALID_CAST), + case("`(0d0)`", ErrorCode.EVALUATOR_INVALID_CAST), // struct - case("`{}`", EVALUATOR_INVALID_CAST), - case("{}", EVALUATOR_INVALID_CAST), - case("`{a:12d0}`", EVALUATOR_INVALID_CAST), - case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST), + case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{}", ErrorCode.EVALUATOR_INVALID_CAST), + case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST), + case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST), // bag - case("<<>>", EVALUATOR_INVALID_CAST), - case("<<`14d0`>>", EVALUATOR_INVALID_CAST), - case("<<`20d0`>>", EVALUATOR_INVALID_CAST) - ).types(CLOB.sqlTextNames), + case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`14d0`>>", ErrorCode.EVALUATOR_INVALID_CAST), + case("<<`20d0`>>", ErrorCode.EVALUATOR_INVALID_CAST) + ).types(ExprValueType.CLOB.sqlTextNames), listOf( // booleans - case("TRUE AND FALSE", EVALUATOR_INVALID_CAST), - case("`true`", EVALUATOR_INVALID_CAST), + case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST), + case("`true`", ErrorCode.EVALUATOR_INVALID_CAST), // numbers - case("5", EVALUATOR_INVALID_CAST), - case("`0e0`", EVALUATOR_INVALID_CAST), - case("1.1", EVALUATOR_INVALID_CAST), - case("-20.1", EVALUATOR_INVALID_CAST), + case("5", ErrorCode.EVALUATOR_INVALID_CAST), + case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST), + case("1.1", ErrorCode.EVALUATOR_INVALID_CAST), + case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST), // timestamp - case("`2007-10-10T`", 
EVALUATOR_INVALID_CAST),
+            case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST),
             // text
-            case("'hello'", EVALUATOR_INVALID_CAST),
-            case("'-20'", EVALUATOR_INVALID_CAST),
-            case("""`"1000"`""", EVALUATOR_INVALID_CAST),
-            case("""`'2e100'`""", EVALUATOR_INVALID_CAST),
-            case("""`'2d100'`""", EVALUATOR_INVALID_CAST),
+            case("'hello'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("'-20'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`"1000"`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2e100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2d100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
             // lob
             case("""`{{""}}`""", """{{}}"""),
             case("""`{{"0"}}`""", """{{MA==}}"""),
@@ -584,49 +582,49 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() {
             case("`{{MS4w}}`", """{{MS4w}}"""), // 1.0
             case("`{{MmUxMA==}}`", """{{MmUxMA==}}"""), // 2e10
             // list
-            case("`[]`", EVALUATOR_INVALID_CAST),
-            case("['hello']", EVALUATOR_INVALID_CAST),
-            case("`[-2d0, 0d0]`", EVALUATOR_INVALID_CAST),
+            case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`[-2d0, 0d0]`", ErrorCode.EVALUATOR_INVALID_CAST),
             // sexp
-            case("`()`", EVALUATOR_INVALID_CAST),
-            case("`(1d0)`", EVALUATOR_INVALID_CAST),
-            case("`(0d0)`", EVALUATOR_INVALID_CAST),
+            case("`()`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`(1d0)`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`(0d0)`", ErrorCode.EVALUATOR_INVALID_CAST),
             // struct
-            case("`{}`", EVALUATOR_INVALID_CAST),
-            case("{}", EVALUATOR_INVALID_CAST),
-            case("`{a:12d0}`", EVALUATOR_INVALID_CAST),
-            case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST),
+            case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{}", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST),
             // bag
-            case("<<>>", EVALUATOR_INVALID_CAST),
-            case("<<`14d0`>>", EVALUATOR_INVALID_CAST),
-            case("<<`20d0`>>", EVALUATOR_INVALID_CAST)
-        ).types(BLOB.sqlTextNames),
+            case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("<<`14d0`>>", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("<<`20d0`>>", ErrorCode.EVALUATOR_INVALID_CAST)
+        ).types(ExprValueType.BLOB.sqlTextNames),
         listOf(
             // booleans
-            case("TRUE AND FALSE", EVALUATOR_INVALID_CAST),
-            case("`true`", EVALUATOR_INVALID_CAST),
+            case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`true`", ErrorCode.EVALUATOR_INVALID_CAST),
             // numbers
-            case("5", EVALUATOR_INVALID_CAST),
-            case("`0e0`", EVALUATOR_INVALID_CAST),
-            case("1.1", EVALUATOR_INVALID_CAST),
-            case("-20.1", EVALUATOR_INVALID_CAST),
+            case("5", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("1.1", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST),
             // timestamp
-            case("`2007-10-10T`", EVALUATOR_INVALID_CAST),
+            case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST),
             // text
-            case("'hello'", EVALUATOR_INVALID_CAST),
-            case("'-20'", EVALUATOR_INVALID_CAST),
-            case("""`"1000"`""", EVALUATOR_INVALID_CAST),
-            case("""`'2e100'`""", EVALUATOR_INVALID_CAST),
-            case("""`'2d100'`""", EVALUATOR_INVALID_CAST),
+            case("'hello'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("'-20'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`"1000"`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2e100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2d100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
             // lob
-            case("""`{{""}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST),
-            case("`{{}}`", EVALUATOR_INVALID_CAST),
-            case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0
-            case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0
-            case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10
+            case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0
+            case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0
+            case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10
             // list
             case("`[]`", "[]"),
             case("['hello']", "[\"hello\"]"),
@@ -636,41 +634,41 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() {
             case("`(1d0)`", "[1d0]"),
             case("`(0d0)`", "[0d0]"),
             // struct
-            case("`{}`", EVALUATOR_INVALID_CAST),
-            case("{}", EVALUATOR_INVALID_CAST),
-            case("`{a:12d0}`", EVALUATOR_INVALID_CAST),
-            case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST),
+            case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{}", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST),
             // bag
             case("<<>>", "[]"), // TODO bag verification
             case("<<`14d0`>>", "[14d0]"), // TODO bag verification
             case("<<`20d0`>>", "[20d0]") // TODO bag verification
-        ).types(LIST.sqlTextNames),
+        ).types(ExprValueType.LIST.sqlTextNames),
         listOf(
             // booleans
-            case("TRUE AND FALSE", EVALUATOR_INVALID_CAST),
-            case("`true`", EVALUATOR_INVALID_CAST),
+            case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`true`", ErrorCode.EVALUATOR_INVALID_CAST),
             // numbers
-            case("5", EVALUATOR_INVALID_CAST),
-            case("`0e0`", EVALUATOR_INVALID_CAST),
-            case("1.1", EVALUATOR_INVALID_CAST),
-            case("-20.1", EVALUATOR_INVALID_CAST),
+            case("5", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("1.1", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST),
             // timestamp
-            case("`2007-10-10T`", EVALUATOR_INVALID_CAST),
+            case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST),
             // text
-            case("'hello'", EVALUATOR_INVALID_CAST),
-            case("'-20'", EVALUATOR_INVALID_CAST),
-            case("""`"1000"`""", EVALUATOR_INVALID_CAST),
-            case("""`'2e100'`""", EVALUATOR_INVALID_CAST),
-            case("""`'2d100'`""", EVALUATOR_INVALID_CAST),
+            case("'hello'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("'-20'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`"1000"`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2e100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2d100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
             // lob
-            case("""`{{""}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST),
-            case("`{{}}`", EVALUATOR_INVALID_CAST),
-            case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0
-            case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0
-            case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10
+            case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0
+            case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0
+            case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10
             // list
             case("`[]`", "()"),
             case("['hello']", "(\"hello\")"),
@@ -680,85 +678,85 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() {
             case("`(1d0)`", "(1d0)"),
             case("`(0d0)`", "(0d0)"),
             // struct
-            case("`{}`", EVALUATOR_INVALID_CAST),
-            case("{}", EVALUATOR_INVALID_CAST),
-            case("`{a:12d0}`", EVALUATOR_INVALID_CAST),
-            case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST),
+            case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{}", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST),
             // bag
             case("<<>>", "()"),
             case("<<`14d0`>>", "(14d0)"),
             case("<<`20d0`>>", "(20d0)")
-        ).types(SEXP.sqlTextNames),
+        ).types(ExprValueType.SEXP.sqlTextNames),
         listOf(
             // booleans
-            case("TRUE AND FALSE", EVALUATOR_INVALID_CAST),
-            case("`true`", EVALUATOR_INVALID_CAST),
+            case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`true`", ErrorCode.EVALUATOR_INVALID_CAST),
            // numbers
-            case("5", EVALUATOR_INVALID_CAST),
-            case("`0e0`", EVALUATOR_INVALID_CAST),
-            case("1.1", EVALUATOR_INVALID_CAST),
-            case("-20.1", EVALUATOR_INVALID_CAST),
+            case("5", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("1.1", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST),
             // timestamp
-            case("`2007-10-10T`", EVALUATOR_INVALID_CAST),
+            case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST),
             // text
-            case("'hello'", EVALUATOR_INVALID_CAST),
-            case("'-20'", EVALUATOR_INVALID_CAST),
-            case("""`"1000"`""", EVALUATOR_INVALID_CAST),
-            case("""`'2e100'`""", EVALUATOR_INVALID_CAST),
-            case("""`'2d100'`""", EVALUATOR_INVALID_CAST),
+            case("'hello'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("'-20'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`"1000"`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2e100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2d100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
             // lob
-            case("""`{{""}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST),
-            case("`{{}}`", EVALUATOR_INVALID_CAST),
-            case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0
-            case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0
-            case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10
+            case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0
+            case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0
+            case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10
             // list
-            case("`[]`", EVALUATOR_INVALID_CAST),
-            case("['hello']", EVALUATOR_INVALID_CAST),
-            case("`[-2d0, 0d0]`", EVALUATOR_INVALID_CAST),
+            case("`[]`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("['hello']", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`[-2d0, 0d0]`", ErrorCode.EVALUATOR_INVALID_CAST),
             // sexp
-            case("`()`", EVALUATOR_INVALID_CAST),
-            case("`(1d0)`", EVALUATOR_INVALID_CAST),
-            case("`(0d0)`", EVALUATOR_INVALID_CAST),
+            case("`()`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`(1d0)`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`(0d0)`", ErrorCode.EVALUATOR_INVALID_CAST),
             // struct
             case("`{}`", "{}"),
             case("{}", "{}"),
             case("`{a:12d0}`", "{a:12d0}"),
             case("{'b':`-4d0`}", "{b:-4d0}"),
             // bag
-            case("<<>>", EVALUATOR_INVALID_CAST),
-            case("<<`14d0`>>", EVALUATOR_INVALID_CAST),
-            case("<<`20d0`>>", EVALUATOR_INVALID_CAST)
-        ).types(STRUCT.sqlTextNames),
+            case("<<>>", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("<<`14d0`>>", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("<<`20d0`>>", ErrorCode.EVALUATOR_INVALID_CAST)
+        ).types(ExprValueType.STRUCT.sqlTextNames),
         listOf(
             // booleans
-            case("TRUE AND FALSE", EVALUATOR_INVALID_CAST),
-            case("`true`", EVALUATOR_INVALID_CAST),
+            case("TRUE AND FALSE", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`true`", ErrorCode.EVALUATOR_INVALID_CAST),
             // numbers
-            case("5", EVALUATOR_INVALID_CAST),
-            case("`0e0`", EVALUATOR_INVALID_CAST),
-            case("1.1", EVALUATOR_INVALID_CAST),
-            case("-20.1", EVALUATOR_INVALID_CAST),
+            case("5", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`0e0`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("1.1", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("-20.1", ErrorCode.EVALUATOR_INVALID_CAST),
             // timestamp
-            case("`2007-10-10T`", EVALUATOR_INVALID_CAST),
+            case("`2007-10-10T`", ErrorCode.EVALUATOR_INVALID_CAST),
             // text
-            case("'hello'", EVALUATOR_INVALID_CAST),
-            case("'-20'", EVALUATOR_INVALID_CAST),
-            case("""`"1000"`""", EVALUATOR_INVALID_CAST),
-            case("""`'2e100'`""", EVALUATOR_INVALID_CAST),
-            case("""`'2d100'`""", EVALUATOR_INVALID_CAST),
+            case("'hello'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("'-20'", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`"1000"`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2e100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`'2d100'`""", ErrorCode.EVALUATOR_INVALID_CAST),
             // lob
-            case("""`{{""}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"1.0"}}`""", EVALUATOR_INVALID_CAST),
-            case("""`{{"2e10"}}`""", EVALUATOR_INVALID_CAST),
-            case("`{{}}`", EVALUATOR_INVALID_CAST),
-            case("`{{MA==}}`", EVALUATOR_INVALID_CAST), // 0
-            case("`{{MS4w}}`", EVALUATOR_INVALID_CAST), // 1.0
-            case("`{{MmUxMA==}}`", EVALUATOR_INVALID_CAST), // 2e10
+            case("""`{{""}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"1.0"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("""`{{"2e10"}}`""", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{}}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{{MA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 0
+            case("`{{MS4w}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 1.0
+            case("`{{MmUxMA==}}`", ErrorCode.EVALUATOR_INVALID_CAST), // 2e10
             // list
             case("`[]`", "[]"), // TODO bag verification
             case("['hello']", "[\"hello\"]"), // TODO bag verification
@@ -768,15 +766,15 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() {
             case("`(1d0)`", "[1d0]"), // TODO bag verification
             case("`(0d0)`", "[0d0]"), // TODO bag verification
             // struct
-            case("`{}`", EVALUATOR_INVALID_CAST),
-            case("{}", EVALUATOR_INVALID_CAST),
-            case("`{a:12d0}`", EVALUATOR_INVALID_CAST),
-            case("{'b':`-4d0`}", EVALUATOR_INVALID_CAST),
+            case("`{}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{}", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("`{a:12d0}`", ErrorCode.EVALUATOR_INVALID_CAST),
+            case("{'b':`-4d0`}", ErrorCode.EVALUATOR_INVALID_CAST),
             // bag
             case("<<>>", "[]"), // TODO bag verification
             case("<<`14d0`>>", "[14d0]"), // TODO bag verification
             case("<<`20d0`>>", "[20d0]") // TODO bag verification
-        ).types(BAG.sqlTextNames)
+        ).types(ExprValueType.BAG.sqlTextNames)
     ).flatMap { it }

     @Test
@@ -804,13 +802,13 @@ class EvaluatingCompilerCastTest : EvaluatorTestBase() {
     fun parametersForCastDateAndTime() = listOf(
         listOf(
            case("DATE '2007-10-10'", "2007-10-10")
-        ).types(DATE.sqlTextNames),
+        ).types(ExprValueType.DATE.sqlTextNames),
         listOf(
             case("DATE '2007-10-10'", "`'2007-10-10'`")
-        ).types(SYMBOL.sqlTextNames),
+        ).types(ExprValueType.SYMBOL.sqlTextNames),
         listOf(
             case("DATE '2007-10-10'", "'2007-10-10'")
-        ).types(STRING.sqlTextNames),
+        ).types(ExprValueType.STRING.sqlTextNames),
         listOf(
             // CAST(