Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix underspecified insert tests #961

Merged
merged 6 commits into from
Apr 21, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 18 additions & 1 deletion enginetest/enginetests.go
Original file line number Diff line number Diff line change
Expand Up @@ -6564,7 +6564,21 @@ func runQueryPreparedWithCtx(
require := require.New(t)
parsed, err := parse.Parse(ctx, q)

if _, ok := parsed.(sql.Databaser); ok {
_, isInsert := parsed.(*plan.InsertInto)
_, isDatabaser := parsed.(sql.Databaser)

// *sqlparser.MultiAlterDDL parses arbitrary nodes in a *plan.Block
if bl, ok := parsed.(*plan.Block); ok {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When is an insert ever inside a block node? What's going on here?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this one also confuses me, daylon added Block last week for foreign keys

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

looks like *sqlparser.MultiAlterDDL is now a thing, Block wraps arbitrarily nested statements?

for _, n := range bl.Children() {
if _, ok := n.(*plan.InsertInto); ok {
isInsert = true
} else if _, ok := n.(sql.Databaser); ok {
isDatabaser = true
}

}
}
if isDatabaser && !isInsert {
max-hoffman marked this conversation as resolved.
Show resolved Hide resolved
// DDL statements don't support prepared statements
sch, iter, err := e.QueryNodeWithBindings(ctx, q, nil, nil)
require.NoError(err, "Unexpected error for query %s", q)
Expand Down Expand Up @@ -6612,6 +6626,9 @@ func runQueryPreparedWithCtx(
}

prepared, err := e.Analyzer.PrepareQuery(ctx, bound, nil)
if err != nil {
return nil, nil, err
}
e.PreparedData[ctx.Session.ID()] = sqle.PreparedData{
Query: q,
Node: prepared,
Expand Down
20 changes: 20 additions & 0 deletions enginetest/insert_queries.go
Original file line number Diff line number Diff line change
Expand Up @@ -602,6 +602,26 @@ var InsertQueries = []WriteQueryTest{
{4, 44},
},
},
{
WriteQuery: `INSERT INTO keyless (c0, c1) SELECT * from keyless where c0=0 and c1=0`,
ExpectedWriteResult: []sql.Row{{sql.NewOkResult(1)}},
SelectQuery: `SELECT * from keyless where c0=0`,
ExpectedSelect: []sql.Row{
{0, 0},
{0, 0},
},
},
{
WriteQuery: `insert into keyless (c0, c1) select a.c0, a.c1 from (select 1, 1) as a(c0, c1) join keyless on a.c0 = keyless.c0`,
ExpectedWriteResult: []sql.Row{{sql.NewOkResult(2)}},
SelectQuery: `SELECT * from keyless where c0=1`,
ExpectedSelect: []sql.Row{
{1, 1},
{1, 1},
{1, 1},
{1, 1},
},
},
}

var SpatialInsertQueries = []WriteQueryTest{
Expand Down
25 changes: 6 additions & 19 deletions enginetest/memory_engine_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -158,14 +158,9 @@ func TestSingleQuery(t *testing.T) {

var test enginetest.QueryTest
test = enginetest.QueryTest{
Query: `SELECT ST_SRID(g, 0) from geometry_table order by i`,
Query: `SELECT 1, 2`,
Expected: []sql.Row{
{sql.Point{X: 1, Y: 2}},
{sql.Linestring{Points: []sql.Point{{X: 1, Y: 2}, {X: 3, Y: 4}}}},
{sql.Polygon{Lines: []sql.Linestring{{Points: []sql.Point{{X: 0, Y: 0}, {X: 0, Y: 1}, {X: 1, Y: 1}, {X: 0, Y: 0}}}}}},
{sql.Point{X: 1, Y: 2}},
{sql.Linestring{Points: []sql.Point{{X: 1, Y: 2}, {X: 3, Y: 4}}}},
{sql.Polygon{Lines: []sql.Linestring{{Points: []sql.Point{{X: 0, Y: 0}, {X: 0, Y: 1}, {X: 1, Y: 1}, {X: 0, Y: 0}}}}}},
{1, 2},
},
}

Expand Down Expand Up @@ -213,18 +208,10 @@ func TestSingleScript(t *testing.T) {

var scripts = []enginetest.ScriptTest{
{
Name: "information_schema.key_column_usage works with composite foreign keys",
SetUpScript: []string{
"CREATE TABLE ptable (pk int primary key, test_score int, height int)",
"CREATE INDEX myindex on ptable(test_score, height)",
"CREATE TABLE ptable2 (pk int primary key, test_score2 int, height2 int, CONSTRAINT fkr FOREIGN KEY (test_score2, height2) REFERENCES ptable(test_score,height));",
},
Query: "SELECT * FROM information_schema.key_column_usage where table_name='ptable2' ORDER BY constraint_name",
Expected: []sql.Row{
{"def", "mydb", "PRIMARY", "def", "mydb", "ptable2", "pk", 1, nil, nil, nil, nil},
{"def", "mydb", "fkr", "def", "mydb", "ptable2", "test_score2", 1, 1, "mydb", "ptable", "test_score"},
{"def", "mydb", "fkr", "def", "mydb", "ptable2", "height2", 2, 2, "mydb", "ptable", "height"},
},
Name: "information_schema.key_column_usage works with composite foreign keys",
SetUpScript: []string{},
Query: "INSERT INTO mytable (i,s) SELECT i * 2, concat(s,s) from mytable order by 1 desc limit 1",
Expected: []sql.Row{},
},
}

Expand Down
2 changes: 1 addition & 1 deletion sql/analyzer/aliases.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ type TableAliases map[string]sql.Nameable
// returns an error.
func (ta TableAliases) add(alias sql.Nameable, target sql.Nameable) error {
lowerName := strings.ToLower(alias.Name())
if _, ok := ta[lowerName]; ok && lowerName != dualTableName {
if _, ok := ta[lowerName]; ok && lowerName != sql.DualTableName {
return sql.ErrDuplicateAliasOrTable.New(alias.Name())
}

Expand Down
38 changes: 36 additions & 2 deletions sql/analyzer/analyzer.go
Original file line number Diff line number Diff line change
Expand Up @@ -357,7 +357,8 @@ func DefaultRuleSelector(id RuleId) bool {
switch id {
// prepared statement rules are incompatible with default rules
case stripDecorationsId,
unresolveTablesId:
unresolveTablesId,
resolvePreparedInsertId:
return false
}
return true
Expand All @@ -374,7 +375,7 @@ func (a *Analyzer) Analyze(ctx *sql.Context, n sql.Node, scope *Scope) (sql.Node
// are applied
func prePrepareRuleSelector(id RuleId) bool {
switch id {
case resolveInsertRowsId,
case resolvePreparedInsertId,
insertTopNId,
inSubqueryIndexesId,
TrackProcessId,
Expand Down Expand Up @@ -413,6 +414,39 @@ func postPrepareRuleSelector(id RuleId) bool {
case stripDecorationsId,
unresolveTablesId,

expandStarsId,
resolveFunctionsId,
flattenTableAliasesId,
pushdownSortId,
pushdownGroupbyAliasesId,
resolveDatabasesId,
resolveTablesId,

resolveOrderbyLiteralsId,
qualifyColumnsId,
resolveColumnsId,

pushdownFiltersId,
subqueryIndexesId,
inSubqueryIndexesId,
resolvePreparedInsertId,

TrackProcessId,
parallelizeId,
clearWarningsId:
return true
}
return false
}

// postPrepareInsertSourceRuleSelector is applied to the source of an insert
// in a cached prepared statement plan after bindvars are applied
func postPrepareInsertSourceRuleSelector(id RuleId) bool {
switch id {
case stripDecorationsId,
unresolveTablesId,

expandStarsId,
resolveFunctionsId,
flattenTableAliasesId,
pushdownSortId,
Expand Down
24 changes: 24 additions & 0 deletions sql/analyzer/inserts.go
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,30 @@ func resolveInsertRows(ctx *sql.Context, a *Analyzer, n sql.Node, scope *Scope,
})
}

// resolvePreparedInsert applies post-optimization
// rules to Insert.Source for prepared statements.
func resolvePreparedInsert(ctx *sql.Context, a *Analyzer, n sql.Node, scope *Scope, sel RuleSelector) (sql.Node, transform.TreeIdentity, error) {
return transform.Node(n, func(n sql.Node) (sql.Node, transform.TreeIdentity, error) {
ins, ok := n.(*plan.InsertInto)
if !ok {
return n, transform.SameTree, nil
}

// TriggerExecutor has already been analyzed
if _, ok := ins.Source.(*plan.TriggerExecutor); ok {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

So we don't do this if there's a pre-insert trigger? Why? Leave a comment

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

i'll copy the comment from above

return n, transform.SameTree, nil
}

source, _, err := a.analyzeWithSelector(ctx, ins.Source, scope, SelectAllBatches, postPrepareInsertSourceRuleSelector)
if err != nil {
return nil, transform.SameTree, err
}

source = StripPassthroughNodes(source)
return ins.WithSource(source), transform.NewTree, nil
})
}

// Ensures that the number of elements in each Value tuple is empty
func existsNonZeroValueCount(values sql.Node) bool {
switch node := values.(type) {
Expand Down
2 changes: 1 addition & 1 deletion sql/analyzer/privileges.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ func validatePrivileges(ctx *sql.Context, a *Analyzer, n sql.Node, scope *Scope,
if user == nil {
return nil, transform.SameTree, mysql.NewSQLError(mysql.ERAccessDeniedError, mysql.SSAccessDeniedError, "Access denied for user '%v'", ctx.Session.Client().User)
}
if isDualTable(getTable(n)) {
if sql.IsDualTable(getTable(n)) {
return n, transform.SameTree, nil
}
if !n.CheckPrivileges(ctx, a.Catalog.GrantTables) {
Expand Down
19 changes: 3 additions & 16 deletions sql/analyzer/resolve_tables.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,8 @@ import (
"github.com/dolthub/go-mysql-server/sql/transform"
)

const dualTableName = "dual"

var dualTableSchema = sql.NewPrimaryKeySchema(sql.Schema{
{Name: "dummy", Source: dualTableName, Type: sql.LongText, Nullable: false},
})
var dualTable = func() sql.Table {
t := memory.NewTable(dualTableName, dualTableSchema, nil)
t := memory.NewTable(sql.DualTableName, sql.DualTableSchema, nil)

ctx := sql.NewEmptyContext()

Expand All @@ -45,14 +40,6 @@ var dualTable = func() sql.Table {
return t
}()

// isDualTable returns whether the given table is the "dual" table.
func isDualTable(t sql.Table) bool {
if t == nil {
return false
}
return t.Name() == dualTableName && t.Schema().Equals(dualTableSchema.Schema)
}

func resolveTables(ctx *sql.Context, a *Analyzer, n sql.Node, scope *Scope, sel RuleSelector) (sql.Node, transform.TreeIdentity, error) {
span, _ := ctx.Span("resolve_tables")
defer span.Finish()
Expand Down Expand Up @@ -245,15 +232,15 @@ func setTargetSchemas(ctx *sql.Context, a *Analyzer, n sql.Node, scope *Scope, s

func handleTableLookupFailure(err error, tableName string, dbName string, a *Analyzer, t sql.UnresolvedTable) (sql.Node, error) {
if sql.ErrDatabaseNotFound.Is(err) {
if tableName == dualTableName {
if tableName == sql.DualTableName {
a.Log("table resolved: %q", t.Name())
return plan.NewResolvedTable(dualTable, nil, nil), nil
}
if dbName == "" {
return nil, sql.ErrNoDatabaseSelected.New()
}
} else if sql.ErrTableNotFound.Is(err) || sql.ErrDatabaseAccessDeniedForUser.Is(err) || sql.ErrTableAccessDeniedForUser.Is(err) {
if tableName == dualTableName {
if tableName == sql.DualTableName {
a.Log("table resolved: %s", t.Name())
return plan.NewResolvedTable(dualTable, nil, nil), nil
}
Expand Down
120 changes: 120 additions & 0 deletions sql/analyzer/rule_ids.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
package analyzer

//go:generate stringer -type=RuleId -linecomment

// RuleId uniquely identifies an analyzer rule. The trailing line comments on
// the constants below are consumed by `stringer -linecomment` to produce each
// rule's display name — do not edit them without re-running go generate.
// Constant order fixes the iota values; do not reorder.
type RuleId int

const (
	// once before
	validateOffsetAndLimitId      RuleId = iota //validateOffsetAndLimit
	validateCreateTableId                       // validateCreateTable
	validateExprSemId                           // validateExprSem
	resolveVariablesId                          // resolveVariables
	resolveNamedWindowsId                       // resolveNamedWindows
	resolveSetVariablesId                       // resolveSetVariables
	resolveViewsId                              // resolveViews
	liftCtesId                                  // liftCtes
	resolveCtesId                               // resolveCtes
	liftRecursiveCtesId                         // liftRecursiveCtes
	resolveDatabasesId                          // resolveDatabases
	resolveTablesId                             // resolveTables
	loadStoredProceduresId                      // loadStoredProcedures
	validateDropTablesId                        // validateDropTables
	setTargetSchemasId                          // setTargetSchemas
	resolveCreateLikeId                         // resolveCreateLike
	parseColumnDefaultsId                       // parseColumnDefaults
	resolveDropConstraintId                     // resolveDropConstraint
	validateDropConstraintId                    // validateDropConstraint
	loadCheckConstraintsId                      // loadCheckConstraints
	resolveCreateSelectId                       // resolveCreateSelect
	resolveSubqueriesId                         // resolveSubqueries
	resolveUnionsId                             // resolveUnions
	resolveDescribeQueryId                      // resolveDescribeQuery
	checkUniqueTableNamesId                     // checkUniqueTableNames
	resolveTableFunctionsId                     // resolveTableFunctions
	resolveDeclarationsId                       // resolveDeclarations
	validateCreateTriggerId                     // validateCreateTrigger
	validateCreateProcedureId                   // validateCreateProcedure
	loadInfoSchemaId                            // loadInfoSchema
	validateReadOnlyDatabaseId                  // validateReadOnlyDatabase
	validateReadOnlyTransactionId               // validateReadOnlyTransaction
	validateDatabaseSetId                       // validateDatabaseSet
	// NOTE(review): "Priviledges" is misspelled; renaming would touch rule
	// registration and regenerated stringer output, so it is left as-is here.
	validatePriviledgesId    // validatePriviledges
	stripDecorationsId       // stripDecorations
	unresolveTablesId        // unresolveTables
	validateJoinComplexityId // validateJoinComplexity

	// default
	resolveNaturalJoinsId          // resolveNaturalJoins
	resolveOrderbyLiteralsId       // resolveOrderbyLiterals
	resolveFunctionsId             // resolveFunctions
	flattenTableAliasesId          // flattenTableAliases
	pushdownSortId                 // pushdownSort
	pushdownGroupbyAliasesId       // pushdownGroupbyAliases
	pushdownSubqueryAliasFiltersId // pushdownSubqueryAliasFilters
	qualifyColumnsId               // qualifyColumns
	resolveColumnsId               // resolveColumns
	resolveColumnDefaultsId        // resolveColumnDefaults
	validateCheckConstraintId      // validateCheckConstraint
	resolveBarewordSetVariablesId  // resolveBarewordSetVariables
	expandStarsId                  // expandStars
	resolveHavingId                // resolveHaving
	mergeUnionSchemasId            // mergeUnionSchemas
	flattenAggregationExprsId      // flattenAggregationExprs
	reorderProjectionId            // reorderProjection
	resolveSubqueryExprsId         // resolveSubqueryExprs
	replaceCrossJoinsId            // replaceCrossJoins
	moveJoinCondsToFilterId        // moveJoinCondsToFilter
	evalFilterId                   // evalFilter
	optimizeDistinctId             // optimizeDistinct

	// after default
	finalizeSubqueriesId          // finalizeSubqueries
	finalizeUnionsId              // finalizeUnions
	loadTriggersId                // loadTriggers
	processTruncateId             // processTruncate
	validateAlterColumnId         // validateAlterColumn
	resolveGeneratorsId           // resolveGenerators
	removeUnnecessaryConvertsId   // removeUnnecessaryConverts
	assignCatalogId               // assignCatalog
	pruneColumnsId                // pruneColumns
	optimizeJoinsId               // optimizeJoins
	pushdownFiltersId             // pushdownFilters
	subqueryIndexesId             // subqueryIndexes
	inSubqueryIndexesId           // inSubqueryIndexes
	pushdownProjectionsId         // pushdownProjections
	setJoinScopeLenId             // setJoinScopeLen
	eraseProjectionId             // eraseProjection
	insertTopNId                  // insertTopN
	cacheSubqueryResultsId        // cacheSubqueryResults
	cacheSubqueryAliasesInJoinsId // cacheSubqueryAliasesInJoins
	applyHashLookupsId            // applyHashLookups
	applyHashInId                 // applyHashIn
	resolveInsertRowsId           // resolveInsertRows
	resolvePreparedInsertId       // resolvePreparedInsert
	applyTriggersId               // applyTriggers
	applyProceduresId             // applyProcedures
	assignRoutinesId              // assignRoutines
	modifyUpdateExprsForJoinId    // modifyUpdateExprsForJoin
	applyRowUpdateAccumulatorsId  // applyRowUpdateAccumulators
	applyFKsId                    // applyFKs

	// validate
	validateResolvedId          // validateResolved
	validateOrderById           // validateOrderBy
	validateGroupById           // validateGroupBy
	validateSchemaSourceId      // validateSchemaSource
	validateIndexCreationId     // validateIndexCreation
	validateOperandsId          // validateOperands
	validateCaseResultTypesId   // validateCaseResultTypes
	validateIntervalUsageId     // validateIntervalUsage
	validateExplodeUsageId      // validateExplodeUsage
	validateSubqueryColumnsId   // validateSubqueryColumns
	validateUnionSchemasMatchId // validateUnionSchemasMatch
	validateAggregationsId      // validateAggregations

	// after all
	TrackProcessId  // trackProcess
	parallelizeId   // parallelize
	clearWarningsId // clearWarnings
)
Loading