diff --git a/core/build.gradle b/core/build.gradle index a338b8f368..624c10fd6b 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -57,6 +57,7 @@ dependencies { testImplementation('org.junit.jupiter:junit-jupiter:5.6.2') testImplementation group: 'org.hamcrest', name: 'hamcrest-library', version: '2.1' testImplementation group: 'org.mockito', name: 'mockito-core', version: '3.12.4' + testImplementation group: 'org.mockito', name: 'mockito-inline', version: '3.12.4' testImplementation group: 'org.mockito', name: 'mockito-junit-jupiter', version: '3.12.4' } diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java index ba40020782..0c1be4319b 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java @@ -49,6 +49,7 @@ import org.opensearch.sql.ast.tree.Kmeans; import org.opensearch.sql.ast.tree.Limit; import org.opensearch.sql.ast.tree.ML; +import org.opensearch.sql.ast.tree.Paginate; import org.opensearch.sql.ast.tree.Parse; import org.opensearch.sql.ast.tree.Project; import org.opensearch.sql.ast.tree.RareTopN; @@ -83,6 +84,7 @@ import org.opensearch.sql.planner.logical.LogicalLimit; import org.opensearch.sql.planner.logical.LogicalML; import org.opensearch.sql.planner.logical.LogicalMLCommons; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalRareTopN; @@ -520,6 +522,12 @@ public LogicalPlan visitML(ML node, AnalysisContext context) { return new LogicalML(child, node.getArguments()); } + @Override + public LogicalPlan visitPaginate(Paginate paginate, AnalysisContext context) { + LogicalPlan child = paginate.getChild().get(0).accept(this, context); + return new LogicalPaginate(paginate.getPageSize(), List.of(child)); + } + /** * The first argument is always "asc", others are optional. * Given nullFirst argument, use its value. Otherwise just use DEFAULT_ASC/DESC. 
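Note on the new Analyzer#visitPaginate above: it simply analyzes the single child and wraps it into a LogicalPaginate that carries the page size. A minimal sketch of that behaviour, mirroring the visit_paginate unit test added later in this patch (the analyze() helper and AstDSL factory are assumed from the existing analyzer test fixtures):

    LogicalPlan actual = analyze(new Paginate(10, AstDSL.relation("dummy")));
    assertTrue(actual instanceof LogicalPaginate);
    assertEquals(10, ((LogicalPaginate) actual).getPageSize());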
diff --git a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java index 393de05164..adcde61d42 100644 --- a/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/ast/AbstractNodeVisitor.java @@ -47,6 +47,7 @@ import org.opensearch.sql.ast.tree.Kmeans; import org.opensearch.sql.ast.tree.Limit; import org.opensearch.sql.ast.tree.ML; +import org.opensearch.sql.ast.tree.Paginate; import org.opensearch.sql.ast.tree.Parse; import org.opensearch.sql.ast.tree.Project; import org.opensearch.sql.ast.tree.RareTopN; @@ -289,4 +290,8 @@ public T visitQuery(Query node, C context) { public T visitExplain(Explain node, C context) { return visitStatement(node, context); } + + public T visitPaginate(Paginate paginate, C context) { + return visitChildren(paginate, context); + } } diff --git a/core/src/main/java/org/opensearch/sql/ast/statement/Query.java b/core/src/main/java/org/opensearch/sql/ast/statement/Query.java index 17682cd47b..82efdde4dd 100644 --- a/core/src/main/java/org/opensearch/sql/ast/statement/Query.java +++ b/core/src/main/java/org/opensearch/sql/ast/statement/Query.java @@ -27,6 +27,7 @@ public class Query extends Statement { protected final UnresolvedPlan plan; + protected final int fetchSize; @Override public R accept(AbstractNodeVisitor visitor, C context) { diff --git a/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java b/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java new file mode 100644 index 0000000000..55e0e8c7a6 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/ast/tree/Paginate.java @@ -0,0 +1,48 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.ast.tree; + +import java.util.List; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; +import org.opensearch.sql.ast.AbstractNodeVisitor; +import org.opensearch.sql.ast.Node; + +/** + * AST node to represent pagination operation. + * Actually a wrapper to the AST. 
+ */ +@RequiredArgsConstructor +@EqualsAndHashCode(callSuper = false) +@ToString +public class Paginate extends UnresolvedPlan { + @Getter + private final int pageSize; + private UnresolvedPlan child; + + public Paginate(int pageSize, UnresolvedPlan child) { + this.pageSize = pageSize; + this.child = child; + } + + @Override + public List getChild() { + return List.of(child); + } + + @Override + public T accept(AbstractNodeVisitor nodeVisitor, C context) { + return nodeVisitor.visitPaginate(this, context); + } + + @Override + public UnresolvedPlan attach(UnresolvedPlan child) { + this.child = child; + return this; + } +} diff --git a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java index 27d06d8151..7945f8aec3 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java +++ b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java @@ -12,8 +12,6 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.google.gson.Gson; -import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -21,7 +19,6 @@ import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NoArgsConstructor; -import lombok.RequiredArgsConstructor; import lombok.Setter; import org.opensearch.sql.datasource.DataSourceService; diff --git a/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java b/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java new file mode 100644 index 0000000000..6ed8e02e5f --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/exception/UnsupportedCursorRequestException.java @@ -0,0 +1,12 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.exception; + +/** + * This should be thrown by V2 engine to support fallback scenario. + */ +public class UnsupportedCursorRequestException extends RuntimeException { +} diff --git a/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java b/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java index 1936a0f517..8d87bd9b14 100644 --- a/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java +++ b/core/src/main/java/org/opensearch/sql/executor/ExecutionEngine.java @@ -14,6 +14,7 @@ import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.type.ExprType; +import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.physical.PhysicalPlan; /** @@ -53,6 +54,8 @@ void execute(PhysicalPlan plan, ExecutionContext context, class QueryResponse { private final Schema schema; private final List results; + private final long total; + private final Cursor cursor; } @Data diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryId.java b/core/src/main/java/org/opensearch/sql/executor/QueryId.java index 933cb5d82d..43d6aed85e 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryId.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryId.java @@ -16,6 +16,7 @@ * Query id of {@link AbstractPlan}. */ public class QueryId { + public static final QueryId None = new QueryId(""); /** * Query id. 
*/ diff --git a/core/src/main/java/org/opensearch/sql/executor/QueryService.java b/core/src/main/java/org/opensearch/sql/executor/QueryService.java index 94e7081920..7870b14755 100644 --- a/core/src/main/java/org/opensearch/sql/executor/QueryService.java +++ b/core/src/main/java/org/opensearch/sql/executor/QueryService.java @@ -15,7 +15,9 @@ import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.planner.PlanContext; import org.opensearch.sql.planner.Planner; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; import org.opensearch.sql.planner.physical.PhysicalPlan; /** @@ -28,7 +30,15 @@ public class QueryService { private final ExecutionEngine executionEngine; + /** + * There are two planners: one handles pagination requests (cursor/scroll) only, and + * another one handles everything else. + * @see OpenSearchPluginModule#queryPlanFactory (:plugin module) + * @see LogicalPlanOptimizer#paginationCreate + * @see QueryService + */ private final Planner planner; + private final Planner paginationPlanner; /** * Execute the {@link UnresolvedPlan}, using {@link ResponseListener} to get response. @@ -46,6 +56,14 @@ public void execute(UnresolvedPlan plan, } } + /** + * Execute a physical plan without analyzing or planning anything. + */ + public void executePlan(PhysicalPlan plan, + ResponseListener listener) { + executionEngine.execute(plan, ExecutionContext.emptyExecutionContext(), listener); + } + /** * Execute the {@link UnresolvedPlan}, with {@link PlanContext} and using {@link ResponseListener} * to get response. @@ -97,6 +115,6 @@ public LogicalPlan analyze(UnresolvedPlan plan) { * Translate {@link LogicalPlan} to {@link PhysicalPlan}. */ public PhysicalPlan plan(LogicalPlan plan) { - return planner.plan(plan); + return plan instanceof LogicalPaginate ? paginationPlanner.plan(plan) : planner.plan(plan); } } diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/ContinuePaginatedPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/ContinuePaginatedPlan.java new file mode 100644 index 0000000000..03309359a1 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/executor/execution/ContinuePaginatedPlan.java @@ -0,0 +1,59 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.execution; + +import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.QueryId; +import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; +import org.opensearch.sql.planner.physical.PhysicalPlan; + +/** + * ContinuePaginatedPlan represents a cursor request. + * It returns subsequent pages to the user (the 2nd page and all following ones). + * {@link PaginatedPlan} + */ +public class ContinuePaginatedPlan extends AbstractPlan { + + private final String cursor; + private final QueryService queryService; + private final PaginatedPlanCache paginatedPlanCache; + + private final ResponseListener queryResponseListener; + + + /** + * Create an abstract plan that can continue paginating a given cursor. 
+ */ + public ContinuePaginatedPlan(QueryId queryId, String cursor, QueryService queryService, + PaginatedPlanCache planCache, + ResponseListener + queryResponseListener) { + super(queryId); + this.cursor = cursor; + this.paginatedPlanCache = planCache; + this.queryService = queryService; + this.queryResponseListener = queryResponseListener; + } + + @Override + public void execute() { + try { + PhysicalPlan plan = paginatedPlanCache.convertToPlan(cursor); + queryService.executePlan(plan, queryResponseListener); + } catch (Exception e) { + queryResponseListener.onFailure(e); + } + } + + @Override + public void explain(ResponseListener listener) { + listener.onFailure(new UnsupportedOperationException( + "Explain of a paged query continuation is not supported. " + + "Use `explain` for the initial query request.")); + } +} diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/PaginatedPlan.java b/core/src/main/java/org/opensearch/sql/executor/execution/PaginatedPlan.java new file mode 100644 index 0000000000..5e217f1320 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/executor/execution/PaginatedPlan.java @@ -0,0 +1,53 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.execution; + +import org.apache.commons.lang3.NotImplementedException; +import org.opensearch.sql.ast.tree.Paginate; +import org.opensearch.sql.ast.tree.UnresolvedPlan; +import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.QueryId; +import org.opensearch.sql.executor.QueryService; + +/** + * PaginatedPlan represents a page request. Unlike a regular QueryPlan, + * it returns a paged response to the user and a cursor, which allows querying + * the next page. + * {@link ContinuePaginatedPlan} + */ +public class PaginatedPlan extends AbstractPlan { + final UnresolvedPlan plan; + final int fetchSize; + final QueryService queryService; + final ResponseListener + queryResponseResponseListener; + + /** + * Create an abstract plan that can start paging a query. 
+ */ + public PaginatedPlan(QueryId queryId, UnresolvedPlan plan, int fetchSize, + QueryService queryService, + ResponseListener + queryResponseResponseListener) { + super(queryId); + this.plan = plan; + this.fetchSize = fetchSize; + this.queryService = queryService; + this.queryResponseResponseListener = queryResponseResponseListener; + } + + @Override + public void execute() { + queryService.execute(new Paginate(fetchSize, plan), queryResponseResponseListener); + } + + @Override + public void explain(ResponseListener listener) { + listener.onFailure(new NotImplementedException( + "`explain` feature for paginated requests is not implemented yet.")); + } +} diff --git a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java index 851381cc7a..cabbfbff8e 100644 --- a/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java +++ b/core/src/main/java/org/opensearch/sql/executor/execution/QueryPlanFactory.java @@ -18,9 +18,11 @@ import org.opensearch.sql.ast.statement.Query; import org.opensearch.sql.ast.statement.Statement; import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.exception.UnsupportedCursorRequestException; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryId; import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; /** * QueryExecution Factory. @@ -37,9 +39,10 @@ public class QueryPlanFactory * Query Service. */ private final QueryService queryService; + private final PaginatedPlanCache paginatedPlanCache; /** - * NO_CONSUMER_RESPONSE_LISTENER should never been called. It is only used as constructor + * NO_CONSUMER_RESPONSE_LISTENER should never be called. It is only used as constructor * parameter of {@link QueryPlan}. */ @VisibleForTesting @@ -62,39 +65,62 @@ public void onFailure(Exception e) { /** * Create QueryExecution from Statement. */ - public AbstractPlan create( + public AbstractPlan createContinuePaginatedPlan( Statement statement, Optional> queryListener, Optional> explainListener) { return statement.accept(this, Pair.of(queryListener, explainListener)); } + /** + * Creates a ContinuePaginatedPlan from a cursor. + */ + public AbstractPlan createContinuePaginatedPlan(String cursor, boolean isExplain, + ResponseListener queryResponseListener, + ResponseListener explainListener) { + QueryId queryId = QueryId.queryId(); + var plan = new ContinuePaginatedPlan(queryId, cursor, queryService, + paginatedPlanCache, queryResponseListener); + return isExplain ? new ExplainPlan(queryId, plan, explainListener) : plan; + } + @Override public AbstractPlan visitQuery( Query node, - Pair< - Optional>, - Optional>> + Pair>, + Optional>> context) { Preconditions.checkArgument( context.getLeft().isPresent(), "[BUG] query listener must be not null"); - return new QueryPlan(QueryId.queryId(), node.getPlan(), queryService, context.getLeft().get()); + if (node.getFetchSize() > 0) { + if (paginatedPlanCache.canConvertToCursor(node.getPlan())) { + return new PaginatedPlan(QueryId.queryId(), node.getPlan(), node.getFetchSize(), + queryService, + context.getLeft().get()); + } else { + // This should be picked up by the legacy engine. 
+ throw new UnsupportedCursorRequestException(); + } + } else { + return new QueryPlan(QueryId.queryId(), node.getPlan(), queryService, + context.getLeft().get()); + } } @Override public AbstractPlan visitExplain( Explain node, - Pair< - Optional>, - Optional>> + Pair>, + Optional>> context) { Preconditions.checkArgument( context.getRight().isPresent(), "[BUG] explain listener must be not null"); return new ExplainPlan( QueryId.queryId(), - create(node.getStatement(), Optional.of(NO_CONSUMER_RESPONSE_LISTENER), Optional.empty()), + createContinuePaginatedPlan(node.getStatement(), + Optional.of(NO_CONSUMER_RESPONSE_LISTENER), Optional.empty()), context.getRight().get()); } } diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java b/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java new file mode 100644 index 0000000000..3164794abb --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/CanPaginateVisitor.java @@ -0,0 +1,65 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.pagination; + +import org.opensearch.sql.ast.AbstractNodeVisitor; +import org.opensearch.sql.ast.Node; +import org.opensearch.sql.ast.expression.AllFields; +import org.opensearch.sql.ast.tree.Project; +import org.opensearch.sql.ast.tree.Relation; + +/** + * Use this unresolved plan visitor to check if a plan can be serialized by PaginatedPlanCache. + * If plan.accept(new CanPaginateVisitor(...)) returns true, + * then PaginatedPlanCache.convertToCursor will succeed. Otherwise, it will fail. + * The purpose of this visitor is to activate legacy engine fallback mechanism. + * Currently, the conditions are: + * - only projection of a relation is supported. + * - projection only has * (a.k.a. allFields). + * - Relation only scans one table + * - The table is an open search index. + * So it accepts only queries like `select * from $index` + * See PaginatedPlanCache.canConvertToCursor for usage. + */ +public class CanPaginateVisitor extends AbstractNodeVisitor { + + @Override + public Boolean visitRelation(Relation node, Object context) { + if (!node.getChild().isEmpty()) { + // Relation instance should never have a child, but check just in case. + return Boolean.FALSE; + } + + return Boolean.TRUE; + } + + @Override + public Boolean visitChildren(Node node, Object context) { + return Boolean.FALSE; + } + + @Override + public Boolean visitProject(Project node, Object context) { + // Allow queries with 'SELECT *' only. Those restriction could be removed, but consider + // in-memory aggregation performed by window function (see WindowOperator). + // SELECT max(age) OVER (PARTITION BY city) ... 
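+ // For example, `SELECT * FROM index` can be paged, while `SELECT field FROM index`,
+ // `SELECT * FROM index WHERE ...` and aggregate queries are rejected here, because either
+ // the projection is not a bare `*` or the Project child is a node type for which
+ // visitChildren above returns FALSE.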
+ var projections = node.getProjectList(); + if (projections.size() != 1) { + return Boolean.FALSE; + } + + if (!(projections.get(0) instanceof AllFields)) { + return Boolean.FALSE; + } + + var children = node.getChild(); + if (children.size() != 1) { + return Boolean.FALSE; + } + + return children.get(0).accept(this, context); + } +} diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java b/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java new file mode 100644 index 0000000000..0339bec9ca --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/Cursor.java @@ -0,0 +1,29 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.pagination; + +import lombok.EqualsAndHashCode; +import lombok.Getter; + +@EqualsAndHashCode +public class Cursor { + public static final Cursor None = new Cursor(); + + @Getter + private final byte[] raw; + + private Cursor() { + raw = new byte[] {}; + } + + public Cursor(byte[] raw) { + this.raw = raw; + } + + public String toString() { + return new String(raw); + } +} diff --git a/core/src/main/java/org/opensearch/sql/executor/pagination/PaginatedPlanCache.java b/core/src/main/java/org/opensearch/sql/executor/pagination/PaginatedPlanCache.java new file mode 100644 index 0000000000..89c008aa66 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/executor/pagination/PaginatedPlanCache.java @@ -0,0 +1,161 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.pagination; + +import com.google.common.hash.HashCode; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; +import lombok.RequiredArgsConstructor; +import org.opensearch.sql.ast.tree.UnresolvedPlan; +import org.opensearch.sql.expression.NamedExpression; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; +import org.opensearch.sql.planner.physical.PaginateOperator; +import org.opensearch.sql.planner.physical.PhysicalPlan; +import org.opensearch.sql.planner.physical.ProjectOperator; +import org.opensearch.sql.storage.StorageEngine; +import org.opensearch.sql.storage.TableScanOperator; + +/** + * This class is the entry point for paged requests. It is responsible for cursor serialization + * and deserialization. + */ +@RequiredArgsConstructor +public class PaginatedPlanCache { + public static final String CURSOR_PREFIX = "n:"; + private final StorageEngine storageEngine; + + public boolean canConvertToCursor(UnresolvedPlan plan) { + return plan.accept(new CanPaginateVisitor(), null); + } + + /** + * Converts a physical plan tree to a cursor. May cache plan-related data somewhere. + */ + public Cursor convertToCursor(PhysicalPlan plan) throws IOException { + if (plan instanceof PaginateOperator) { + var cursor = plan.toCursor(); + if (cursor == null) { + return Cursor.None; + } + var raw = CURSOR_PREFIX + compress(cursor); + return new Cursor(raw.getBytes()); + } + return Cursor.None; + } + + /** + * Compresses a serialized query plan. + * @param str string representing a query plan + * @return str compressed with gzip. 
+ */ + String compress(String str) throws IOException { + if (str == null || str.length() == 0) { + return ""; + } + ByteArrayOutputStream out = new ByteArrayOutputStream(); + GZIPOutputStream gzip = new GZIPOutputStream(out); + gzip.write(str.getBytes()); + gzip.close(); + return HashCode.fromBytes(out.toByteArray()).toString(); + } + + /** + * Decompresses a query plan that was compress with {@link PaginatedPlanCache#compress}. + * @param input compressed query plan + * @return decompressed string + */ + String decompress(String input) throws IOException { + if (input == null || input.length() == 0) { + return ""; + } + GZIPInputStream gzip = new GZIPInputStream(new ByteArrayInputStream( + HashCode.fromString(input).asBytes())); + return new String(gzip.readAllBytes()); + } + + /** + * Parse `NamedExpression`s from cursor. + * @param listToFill List to fill with data. + * @param cursor Cursor to parse. + * @return Remaining part of the cursor. + */ + private String parseNamedExpressions(List listToFill, String cursor) { + var serializer = new DefaultExpressionSerializer(); + if (cursor.startsWith(")")) { //empty list + return cursor.substring(cursor.indexOf(',') + 1); + } + while (!cursor.startsWith("(")) { + listToFill.add((NamedExpression) + serializer.deserialize(cursor.substring(0, + Math.min(cursor.indexOf(','), cursor.indexOf(')'))))); + cursor = cursor.substring(cursor.indexOf(',') + 1); + } + return cursor; + } + + /** + * Converts a cursor to a physical plan tree. + */ + public PhysicalPlan convertToPlan(String cursor) { + if (!cursor.startsWith(CURSOR_PREFIX)) { + throw new UnsupportedOperationException("Unsupported cursor"); + } + try { + cursor = cursor.substring(CURSOR_PREFIX.length()); + cursor = decompress(cursor); + + // TODO Parse with ANTLR or serialize as JSON/XML + if (!cursor.startsWith("(Paginate,")) { + throw new UnsupportedOperationException("Unsupported cursor"); + } + // TODO add checks for > 0 + cursor = cursor.substring(cursor.indexOf(',') + 1); + final int currentPageIndex = Integer.parseInt(cursor, 0, cursor.indexOf(','), 10); + + cursor = cursor.substring(cursor.indexOf(',') + 1); + final int pageSize = Integer.parseInt(cursor, 0, cursor.indexOf(','), 10); + + cursor = cursor.substring(cursor.indexOf(',') + 1); + if (!cursor.startsWith("(Project,")) { + throw new UnsupportedOperationException("Unsupported cursor"); + } + cursor = cursor.substring(cursor.indexOf(',') + 1); + if (!cursor.startsWith("(namedParseExpressions,")) { + throw new UnsupportedOperationException("Unsupported cursor"); + } + + cursor = cursor.substring(cursor.indexOf(',') + 1); + List namedParseExpressions = new ArrayList<>(); + cursor = parseNamedExpressions(namedParseExpressions, cursor); + + List projectList = new ArrayList<>(); + if (!cursor.startsWith("(projectList,")) { + throw new UnsupportedOperationException("Unsupported cursor"); + } + cursor = cursor.substring(cursor.indexOf(',') + 1); + cursor = parseNamedExpressions(projectList, cursor); + + if (!cursor.startsWith("(OpenSearchPagedIndexScan,")) { + throw new UnsupportedOperationException("Unsupported cursor"); + } + cursor = cursor.substring(cursor.indexOf(',') + 1); + var indexName = cursor.substring(0, cursor.indexOf(',')); + cursor = cursor.substring(cursor.indexOf(',') + 1); + var scrollId = cursor.substring(0, cursor.indexOf(')')); + TableScanOperator scan = storageEngine.getTableScan(indexName, scrollId); + + return new PaginateOperator(new ProjectOperator(scan, projectList, namedParseExpressions), + pageSize, 
currentPageIndex); + } catch (Exception e) { + throw new UnsupportedOperationException("Unsupported cursor", e); + } + } +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java b/core/src/main/java/org/opensearch/sql/expression/serialization/DefaultExpressionSerializer.java similarity index 95% rename from opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java rename to core/src/main/java/org/opensearch/sql/expression/serialization/DefaultExpressionSerializer.java index dc67da9de5..33c22b2ea5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializer.java +++ b/core/src/main/java/org/opensearch/sql/expression/serialization/DefaultExpressionSerializer.java @@ -4,7 +4,7 @@ */ -package org.opensearch.sql.opensearch.storage.serialization; +package org.opensearch.sql.expression.serialization; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java b/core/src/main/java/org/opensearch/sql/expression/serialization/ExpressionSerializer.java similarity index 90% rename from opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java rename to core/src/main/java/org/opensearch/sql/expression/serialization/ExpressionSerializer.java index b7caeb30f8..f96921e29c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/serialization/ExpressionSerializer.java +++ b/core/src/main/java/org/opensearch/sql/expression/serialization/ExpressionSerializer.java @@ -4,7 +4,7 @@ */ -package org.opensearch.sql.opensearch.storage.serialization; +package org.opensearch.sql.expression.serialization; import org.opensearch.sql.expression.Expression; diff --git a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java index 4a6d4d8222..43422d8733 100644 --- a/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java +++ b/core/src/main/java/org/opensearch/sql/planner/DefaultImplementor.java @@ -11,6 +11,7 @@ import org.opensearch.sql.planner.logical.LogicalEval; import org.opensearch.sql.planner.logical.LogicalFilter; import org.opensearch.sql.planner.logical.LogicalLimit; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanNodeVisitor; import org.opensearch.sql.planner.logical.LogicalProject; @@ -26,6 +27,7 @@ import org.opensearch.sql.planner.physical.EvalOperator; import org.opensearch.sql.planner.physical.FilterOperator; import org.opensearch.sql.planner.physical.LimitOperator; +import org.opensearch.sql.planner.physical.PaginateOperator; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.ProjectOperator; import org.opensearch.sql.planner.physical.RareTopNOperator; @@ -125,6 +127,11 @@ public PhysicalPlan visitLimit(LogicalLimit node, C context) { return new LimitOperator(visitChild(node, context), node.getLimit(), node.getOffset()); } + @Override + public PhysicalPlan visitPaginate(LogicalPaginate plan, C context) { + return new PaginateOperator(visitChild(plan, context), plan.getPageSize()); + } + @Override public PhysicalPlan visitTableScanBuilder(TableScanBuilder plan, C context) { 
return plan.build(); @@ -145,5 +152,4 @@ protected PhysicalPlan visitChild(LogicalPlan node, C context) { // Logical operators visited here must have a single child return node.getChild().get(0).accept(this, context); } - } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java new file mode 100644 index 0000000000..372f9dcf0b --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPaginate.java @@ -0,0 +1,31 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.planner.logical; + +import java.util.List; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.ToString; + +/** + * LogicalPaginate represents pagination operation for underlying plan. + */ +@ToString +@EqualsAndHashCode(callSuper = false) +public class LogicalPaginate extends LogicalPlan { + @Getter + private final int pageSize; + + public LogicalPaginate(int pageSize, List childPlans) { + super(childPlans); + this.pageSize = pageSize; + } + + @Override + public R accept(LogicalPlanNodeVisitor visitor, C context) { + return visitor.visitPaginate(this, context); + } +} diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java index 9a41072fe7..28cf6bcd79 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitor.java @@ -100,4 +100,8 @@ public R visitML(LogicalML plan, C context) { public R visitAD(LogicalAD plan, C context) { return visitNode(plan, context); } + + public R visitPaginate(LogicalPaginate plan, C context) { + return visitNode(plan, context); + } } diff --git a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java index a49c3d5cbe..0ece74690e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java +++ b/core/src/main/java/org/opensearch/sql/planner/logical/LogicalRelation.java @@ -9,6 +9,7 @@ import com.google.common.collect.ImmutableList; import lombok.EqualsAndHashCode; import lombok.Getter; +import lombok.Setter; import lombok.ToString; import org.opensearch.sql.storage.Table; @@ -25,6 +26,10 @@ public class LogicalRelation extends LogicalPlan { @Getter private final Table table; + @Getter + @Setter + private Integer pageSize; + /** * Constructor of LogicalRelation. 
*/ @@ -32,6 +37,7 @@ public LogicalRelation(String relationName, Table table) { super(ImmutableList.of()); this.relationName = relationName; this.table = table; + this.pageSize = null; } @Override diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java index 70847b869b..13bcfabe74 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizer.java @@ -13,8 +13,10 @@ import java.util.List; import java.util.stream.Collectors; import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.optimizer.rule.CreatePagingTableScanBuilder; import org.opensearch.sql.planner.optimizer.rule.MergeFilterAndFilter; import org.opensearch.sql.planner.optimizer.rule.PushFilterUnderSort; +import org.opensearch.sql.planner.optimizer.rule.PushPageSize; import org.opensearch.sql.planner.optimizer.rule.read.CreateTableScanBuilder; import org.opensearch.sql.planner.optimizer.rule.read.TableScanPushDown; import org.opensearch.sql.planner.optimizer.rule.write.CreateTableWriteBuilder; @@ -60,6 +62,30 @@ public static LogicalPlanOptimizer create() { new CreateTableWriteBuilder())); } + /** + * Create {@link LogicalPlanOptimizer} with pre-defined rules. + */ + public static LogicalPlanOptimizer paginationCreate() { + return new LogicalPlanOptimizer(Arrays.asList( + /* + * Phase 1: Transformations that rely on relational algebra equivalence + */ + new MergeFilterAndFilter(), + new PushFilterUnderSort(), + /* + * Phase 2: Transformations that rely on data source push down capability + */ + new PushPageSize(), + new CreatePagingTableScanBuilder(), + TableScanPushDown.PUSH_DOWN_FILTER, + TableScanPushDown.PUSH_DOWN_AGGREGATION, + TableScanPushDown.PUSH_DOWN_SORT, + TableScanPushDown.PUSH_DOWN_LIMIT, + TableScanPushDown.PUSH_DOWN_HIGHLIGHT, + TableScanPushDown.PUSH_DOWN_PROJECT, + new CreateTableWriteBuilder())); + } + /** * Optimize {@link LogicalPlan}. */ diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java index 856d8df7ea..6e54897506 100644 --- a/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/pattern/Patterns.java @@ -16,6 +16,7 @@ import org.opensearch.sql.planner.logical.LogicalFilter; import org.opensearch.sql.planner.logical.LogicalHighlight; import org.opensearch.sql.planner.logical.LogicalLimit; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalProject; import org.opensearch.sql.planner.logical.LogicalRelation; @@ -112,6 +113,16 @@ public static Property table() { : Optional.empty()); } + /** + * Logical pagination with page size. + */ + public static Property pagination() { + return Property.optionalProperty("pagination", + plan -> plan instanceof LogicalPaginate + ? Optional.of(((LogicalPaginate) plan).getPageSize()) + : Optional.empty()); + } + /** * Logical write with table field. 
*/ diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/CreatePagingTableScanBuilder.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/CreatePagingTableScanBuilder.java new file mode 100644 index 0000000000..22079ed9ca --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/CreatePagingTableScanBuilder.java @@ -0,0 +1,49 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.planner.optimizer.rule; + +import static org.opensearch.sql.planner.optimizer.pattern.Patterns.table; + +import com.facebook.presto.matching.Capture; +import com.facebook.presto.matching.Captures; +import com.facebook.presto.matching.Pattern; +import lombok.Getter; +import lombok.experimental.Accessors; +import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.logical.LogicalRelation; +import org.opensearch.sql.planner.optimizer.Rule; +import org.opensearch.sql.storage.Table; +import org.opensearch.sql.storage.read.TableScanBuilder; + +/** + * Rule to create a paged TableScanBuilder in pagination request. + */ +public class CreatePagingTableScanBuilder implements Rule { + /** Capture the table inside matched logical relation operator. */ + private final Capture capture; + + /** Pattern that matches logical relation operator. */ + @Accessors(fluent = true) + @Getter + private final Pattern pattern; + + /** + * Constructor. + */ + public CreatePagingTableScanBuilder() { + this.capture = Capture.newCapture(); + this.pattern = Pattern.typeOf(LogicalRelation.class) + .with(table().capturedAs(capture)); + } + + @Override + public LogicalPlan apply(LogicalRelation plan, Captures captures) { + TableScanBuilder scanBuilder = captures.get(capture) + .createPagedScanBuilder(plan.getPageSize()); + // TODO: Remove this after Prometheus refactored to new table scan builder too + return (scanBuilder == null) ? plan : scanBuilder; + } +} diff --git a/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushPageSize.java b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushPageSize.java new file mode 100644 index 0000000000..95cd23d6ca --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/optimizer/rule/PushPageSize.java @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.planner.optimizer.rule; + +import static org.opensearch.sql.planner.optimizer.pattern.Patterns.pagination; + +import com.facebook.presto.matching.Capture; +import com.facebook.presto.matching.Captures; +import com.facebook.presto.matching.Pattern; +import lombok.Getter; +import lombok.experimental.Accessors; +import org.opensearch.sql.planner.logical.LogicalPaginate; +import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.logical.LogicalRelation; +import org.opensearch.sql.planner.optimizer.Rule; + +public class PushPageSize + implements Rule { + /** Capture the table inside matched logical paginate operator. */ + private final Capture capture; + + /** Pattern that matches logical paginate operator. */ + @Accessors(fluent = true) + @Getter + private final Pattern pattern; + + /** + * Constructor. 
+ */ + public PushPageSize() { + this.capture = Capture.newCapture(); + this.pattern = Pattern.typeOf(LogicalPaginate.class) + .with(pagination().capturedAs(capture)); + } + + private LogicalRelation findLogicalRelation(LogicalPlan plan) { //TODO TBD multiple relations? + for (var subplan : plan.getChild()) { + if (subplan instanceof LogicalRelation) { + return (LogicalRelation) subplan; + } + var found = findLogicalRelation(subplan); + if (found != null) { + return found; + } + } + return null; + } + + @Override + public LogicalPlan apply(LogicalPaginate plan, Captures captures) { + var relation = findLogicalRelation(plan); + if (relation != null) { + relation.setPageSize(captures.get(capture)); + } + return plan; + } +} diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java index 86cd411a2d..a9c7597c3e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/FilterOperator.java @@ -17,8 +17,9 @@ import org.opensearch.sql.storage.bindingtuple.BindingTuple; /** - * The Filter operator use the conditions to evaluate the input {@link BindingTuple}. - * The Filter operator only return the results that evaluated to true. + * The Filter operator represents WHERE clause and + * uses the conditions to evaluate the input {@link BindingTuple}. + * The Filter operator only returns the results that evaluated to true. * The NULL and MISSING are handled by the logic defined in {@link BinaryPredicateOperator}. */ @EqualsAndHashCode(callSuper = false) @@ -29,7 +30,9 @@ public class FilterOperator extends PhysicalPlan { private final PhysicalPlan input; @Getter private final Expression conditions; - @ToString.Exclude private ExprValue next = null; + @ToString.Exclude + private ExprValue next = null; + private long totalHits = 0; @Override public R accept(PhysicalPlanNodeVisitor visitor, C context) { @@ -48,6 +51,7 @@ public boolean hasNext() { ExprValue exprValue = conditions.valueOf(inputValue.bindingTuples()); if (!(exprValue.isNull() || exprValue.isMissing()) && (exprValue.booleanValue())) { next = inputValue; + totalHits++; return true; } } @@ -58,4 +62,10 @@ public boolean hasNext() { public ExprValue next() { return next; } + + @Override + public long getTotalHits() { + // ignore `input.getTotalHits()`, because it returns wrong (unfiltered) value + return totalHits; + } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PaginateOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/PaginateOperator.java new file mode 100644 index 0000000000..97901def0f --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PaginateOperator.java @@ -0,0 +1,84 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.planner.physical; + +import java.util.List; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.planner.physical.PhysicalPlan; +import org.opensearch.sql.planner.physical.PhysicalPlanNodeVisitor; +import org.opensearch.sql.planner.physical.ProjectOperator; + +@RequiredArgsConstructor +@EqualsAndHashCode(callSuper = false) +public class PaginateOperator extends PhysicalPlan { + @Getter + private final PhysicalPlan input; + 
+ @Getter + private final int pageSize; + + /** + * Which page is this? + * May not be necessary in the end. Currently used to increment the "cursor counter" -- + * See usage. + */ + @Getter + private final int pageIndex; + + int numReturned = 0; + + /** + * Pages the given physical plan, with pageSize elements per page, starting with the first page. + */ + public PaginateOperator(PhysicalPlan input, int pageSize) { + this.pageSize = pageSize; + this.input = input; + this.pageIndex = 0; + } + + @Override + public R accept(PhysicalPlanNodeVisitor visitor, C context) { + return visitor.visitPaginate(this, context); + } + + @Override + public boolean hasNext() { + return numReturned < pageSize && input.hasNext(); + } + + @Override + public ExprValue next() { + numReturned += 1; + return input.next(); + } + + public List getChild() { + return List.of(input); + } + + @Override + public ExecutionEngine.Schema schema() { + return input.schema(); + } + + @Override + public String toCursor() { + // Save cursor to read the next page. + // Could process node.getChild() here with another visitor -- one that saves the + // parameters for other physical operators -- ProjectOperator, etc. + // cursor format (before compression and the "n:" prefix added by PaginatedPlanCache): + // (Paginate,<next page index>,<page size>,<child cursor>) + String child = getChild().get(0).toCursor(); + + var nextPage = getPageIndex() + 1; + return child == null || child.isEmpty() + ? null : createSection("Paginate", Integer.toString(nextPage), + Integer.toString(getPageSize()), child); + } +} diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java index b476b01557..312e4bfff9 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlan.java @@ -7,6 +7,7 @@ package org.opensearch.sql.planner.physical; import java.util.Iterator; +import java.util.List; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.planner.PlanNode; @@ -43,6 +44,34 @@ public void add(Split split) { public ExecutionEngine.Schema schema() { throw new IllegalStateException(String.format("[BUG] schema can been only applied to " - + "ProjectOperator, instead of %s", toString())); + + "ProjectOperator, instead of %s", this.getClass().getSimpleName())); + } + + /** + * Returns the total number of hits that matched the search criteria. Note: the query may return fewer rows if limited. + * {@see Settings#QUERY_SIZE_LIMIT}. + * Any plan which adds or removes rows from the response should override it to provide a valid value. + * + * @return Total number of hits that matched the search criteria. + */ + public long getTotalHits() { + return getChild().stream().mapToLong(PhysicalPlan::getTotalHits).max().orElse(0); + } + + public String toCursor() { + throw new IllegalStateException(String.format("%s is not compatible with cursor feature", + this.getClass().getSimpleName())); + } + + /** + * Creates an S-expression that represents a plan node. + * @param plan Label for the plan. + * @param params List of serialized parameters, including the child plans. + * @return A string that represents the plan called with those parameters. + */ + protected String createSection(String plan, String... 
params) { + return "(" + plan + "," + + String.join(",", params) + + ")"; } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java index d4bc4a1ea9..f8b6f2243e 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitor.java @@ -88,4 +88,8 @@ public R visitAD(PhysicalPlan node, C context) { public R visitML(PhysicalPlan node, C context) { return visitNode(node, context); } + + public R visitPaginate(PaginateOperator node, C context) { + return visitNode(node, context); + } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java index 496e4e6ddb..c61b35e0cb 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ProjectOperator.java @@ -22,6 +22,7 @@ import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.parse.ParseExpression; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; /** * Project the fields specified in {@link ProjectOperator#projectList} from input. @@ -94,4 +95,18 @@ public ExecutionEngine.Schema schema() { .map(expr -> new ExecutionEngine.Schema.Column(expr.getName(), expr.getAlias(), expr.type())).collect(Collectors.toList())); } + + @Override + public String toCursor() { + String child = getChild().get(0).toCursor(); + if (child == null || child.isEmpty()) { + return null; + } + var serializer = new DefaultExpressionSerializer(); + String projects = createSection("projectList", + projectList.stream().map(serializer::serialize).toArray(String[]::new)); + String namedExpressions = createSection("namedParseExpressions", + namedParseExpressions.stream().map(serializer::serialize).toArray(String[]::new)); + return createSection("Project", namedExpressions, projects, child); + } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java index 51d2850df7..45884830e1 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/ValuesOperator.java @@ -15,6 +15,7 @@ import lombok.ToString; import org.opensearch.sql.data.model.ExprCollectionValue; import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.LiteralExpression; /** @@ -55,10 +56,17 @@ public boolean hasNext() { return valuesIterator.hasNext(); } + @Override + public long getTotalHits() { + // ValuesOperator used for queries without `FROM` clause, e.g. `select 1`. + // Such query always returns 1 row. 
+ return 1; + } + @Override public ExprValue next() { List values = valuesIterator.next().stream() - .map(expr -> expr.valueOf()) + .map(Expression::valueOf) .collect(Collectors.toList()); return new ExprCollectionValue(values); } diff --git a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java index 246a50ea09..18e9e92886 100644 --- a/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java +++ b/core/src/main/java/org/opensearch/sql/storage/StorageEngine.java @@ -8,7 +8,9 @@ import java.util.Collection; import java.util.Collections; +import java.util.List; import org.opensearch.sql.DataSourceSchemaName; +import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.function.FunctionResolver; /** @@ -30,4 +32,8 @@ default Collection getFunctions() { return Collections.emptyList(); } + default TableScanOperator getTableScan(String indexName, String scrollId) { + String error = String.format("%s.getTableScan needs to be implemented", getClass()); + throw new UnsupportedOperationException(error); + } } diff --git a/core/src/main/java/org/opensearch/sql/storage/Table.java b/core/src/main/java/org/opensearch/sql/storage/Table.java index 496281fa8d..a7f2b606ca 100644 --- a/core/src/main/java/org/opensearch/sql/storage/Table.java +++ b/core/src/main/java/org/opensearch/sql/storage/Table.java @@ -92,4 +92,9 @@ default TableWriteBuilder createWriteBuilder(LogicalWrite plan) { default StreamingSource asStreamingSource() { throw new UnsupportedOperationException(); } + + default TableScanBuilder createPagedScanBuilder(int pageSize) { + var error = String.format("'%s' does not support pagination", getClass().toString()); + throw new UnsupportedOperationException(error); + } } diff --git a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java index 1db29a6a42..01e2091da9 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTest.java @@ -75,6 +75,7 @@ import org.opensearch.sql.ast.tree.AD; import org.opensearch.sql.ast.tree.Kmeans; import org.opensearch.sql.ast.tree.ML; +import org.opensearch.sql.ast.tree.Paginate; import org.opensearch.sql.ast.tree.RareTopN.CommandType; import org.opensearch.sql.exception.ExpressionEvaluationException; import org.opensearch.sql.exception.SemanticCheckException; @@ -83,6 +84,7 @@ import org.opensearch.sql.expression.window.WindowDefinition; import org.opensearch.sql.planner.logical.LogicalAD; import org.opensearch.sql.planner.logical.LogicalMLCommons; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanDSL; import org.opensearch.sql.planner.logical.LogicalProject; @@ -1189,4 +1191,11 @@ public void ml_relation_predict_rcf_without_time_field() { assertTrue(((LogicalProject) actual).getProjectList() .contains(DSL.named(RCF_ANOMALOUS, DSL.ref(RCF_ANOMALOUS, BOOLEAN)))); } + + @Test + public void visit_paginate() { + LogicalPlan actual = analyze(new Paginate(10, AstDSL.relation("dummy"))); + assertTrue(actual instanceof LogicalPaginate); + assertEquals(10, ((LogicalPaginate) actual).getPageSize()); + } } diff --git a/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java b/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java index 4df38027f4..e3e744d8ec 100644 
--- a/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/QueryServiceTest.java @@ -15,11 +15,9 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.lenient; -import static org.mockito.Mockito.when; import java.util.Collections; import java.util.Optional; -import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -27,6 +25,7 @@ import org.opensearch.sql.analysis.Analyzer; import org.opensearch.sql.ast.tree.UnresolvedPlan; import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.PlanContext; import org.opensearch.sql.planner.Planner; import org.opensearch.sql.planner.logical.LogicalPlan; @@ -47,6 +46,9 @@ class QueryServiceTest { @Mock private Planner planner; + @Mock + private Planner paginationPlanner; + @Mock private UnresolvedPlan ast; @@ -118,8 +120,9 @@ class Helper { public Helper() { lenient().when(analyzer.analyze(any(), any())).thenReturn(logicalPlan); lenient().when(planner.plan(any())).thenReturn(plan); + lenient().when(paginationPlanner.plan(any())).thenReturn(plan); - queryService = new QueryService(analyzer, executionEngine, planner); + queryService = new QueryService(analyzer, executionEngine, planner, paginationPlanner); } Helper executeSuccess() { @@ -134,7 +137,8 @@ Helper executeSuccess(Split split) { invocation -> { ResponseListener listener = invocation.getArgument(2); listener.onResponse( - new ExecutionEngine.QueryResponse(schema, Collections.emptyList())); + new ExecutionEngine.QueryResponse(schema, Collections.emptyList(), 0, + Cursor.None)); return null; }) .when(executionEngine) diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/ContinuePaginatedPlanTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/ContinuePaginatedPlanTest.java new file mode 100644 index 0000000000..7ad2390e45 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/executor/execution/ContinuePaginatedPlanTest.java @@ -0,0 +1,96 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.execution; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; +import static org.opensearch.sql.executor.pagination.PaginatedPlanCacheTest.buildCursor; + +import java.util.Map; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.executor.DefaultExecutionEngine; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.QueryId; +import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; +import org.opensearch.sql.storage.StorageEngine; +import org.opensearch.sql.storage.TableScanOperator; + 
+@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class ContinuePaginatedPlanTest { + + private static PaginatedPlanCache paginatedPlanCache; + + private static QueryService queryService; + + /** + * Initialize the mocks. + */ + @BeforeAll + public static void setUp() { + var storageEngine = mock(StorageEngine.class); + when(storageEngine.getTableScan(anyString(), anyString())) + .thenReturn(mock(TableScanOperator.class)); + paginatedPlanCache = new PaginatedPlanCache(storageEngine); + queryService = new QueryService(null, new DefaultExecutionEngine(), null, null); + } + + @Test + public void can_execute_plan() { + var listener = new ResponseListener() { + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + assertNotNull(response); + } + + @Override + public void onFailure(Exception e) { + fail(); + } + }; + var plan = new ContinuePaginatedPlan(QueryId.None, buildCursor(Map.of()), + queryService, paginatedPlanCache, listener); + plan.execute(); + } + + @Test + // Same as previous test, but with malformed cursor + public void can_handle_error_while_executing_plan() { + var listener = new ResponseListener() { + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + fail(); + } + + @Override + public void onFailure(Exception e) { + assertNotNull(e); + } + }; + var plan = new ContinuePaginatedPlan(QueryId.None, buildCursor(Map.of("pageSize", "abc")), + queryService, paginatedPlanCache, listener); + plan.execute(); + } + + @Test + public void explain_is_not_supported() { + var listener = mock(ResponseListener.class); + mock(ContinuePaginatedPlan.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)) + .explain(listener); + verify(listener).onFailure(any(UnsupportedOperationException.class)); + } +} diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/PaginatedPlanTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/PaginatedPlanTest.java new file mode 100644 index 0000000000..16933b9b79 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/executor/execution/PaginatedPlanTest.java @@ -0,0 +1,100 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.execution; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.apache.commons.lang3.NotImplementedException; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.analysis.Analyzer; +import org.opensearch.sql.ast.tree.UnresolvedPlan; +import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.executor.DefaultExecutionEngine; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.QueryId; +import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.planner.Planner; +import org.opensearch.sql.planner.logical.LogicalPaginate; +import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.physical.PhysicalPlan; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class PaginatedPlanTest { + + private static 
QueryService queryService; + + /** + * Initialize the mocks. + */ + @BeforeAll + public static void setUp() { + var analyzer = mock(Analyzer.class); + when(analyzer.analyze(any(), any())).thenReturn(mock(LogicalPaginate.class)); + var planner = mock(Planner.class); + when(planner.plan(any())).thenReturn(mock(PhysicalPlan.class)); + queryService = new QueryService(analyzer, new DefaultExecutionEngine(), null, planner); + } + + @Test + public void can_execute_plan() { + var listener = new ResponseListener() { + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + assertNotNull(response); + } + + @Override + public void onFailure(Exception e) { + fail(); + } + }; + var plan = new PaginatedPlan(QueryId.None, mock(UnresolvedPlan.class), 10, + queryService, listener); + plan.execute(); + } + + @Test + // Same as previous test, but with incomplete PaginatedQueryService + public void can_handle_error_while_executing_plan() { + var listener = new ResponseListener() { + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + fail(); + } + + @Override + public void onFailure(Exception e) { + assertNotNull(e); + } + }; + var plan = new PaginatedPlan(QueryId.None, mock(UnresolvedPlan.class), 10, + new QueryService(null, new DefaultExecutionEngine(), null, null), listener); + plan.execute(); + } + + @Test + public void explain_is_not_supported() { + new PaginatedPlan(null, null, 0, null, null).explain(new ResponseListener<>() { + @Override + public void onResponse(ExecutionEngine.ExplainResponse response) { + fail(); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e instanceof NotImplementedException); + } + }); + } +} diff --git a/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java b/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java index cc4bf070fb..c06b1186cd 100644 --- a/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/execution/QueryPlanFactoryTest.java @@ -8,9 +8,11 @@ package org.opensearch.sql.executor.execution; +import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.when; import static org.opensearch.sql.executor.execution.QueryPlanFactory.NO_CONSUMER_RESPONSE_LISTENER; import java.util.Optional; @@ -24,8 +26,10 @@ import org.opensearch.sql.ast.statement.Statement; import org.opensearch.sql.ast.tree.UnresolvedPlan; import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.exception.UnsupportedCursorRequestException; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; @ExtendWith(MockitoExtension.class) class QueryPlanFactoryTest { @@ -45,46 +49,60 @@ class QueryPlanFactoryTest { @Mock private ExecutionEngine.QueryResponse queryResponse; + @Mock + private PaginatedPlanCache paginatedPlanCache; private QueryPlanFactory factory; @BeforeEach void init() { - factory = new QueryPlanFactory(queryService); + factory = new QueryPlanFactory(queryService, paginatedPlanCache); } @Test public void createFromQueryShouldSuccess() { - Statement query = new Query(plan); + Statement query = new Query(plan, 0); AbstractPlan 
queryExecution = - factory.create(query, Optional.of(queryListener), Optional.empty()); + factory.createContinuePaginatedPlan(query, Optional.of(queryListener), Optional.empty()); assertTrue(queryExecution instanceof QueryPlan); } @Test public void createFromExplainShouldSuccess() { - Statement query = new Explain(new Query(plan)); + Statement query = new Explain(new Query(plan, 0)); AbstractPlan queryExecution = - factory.create(query, Optional.empty(), Optional.of(explainListener)); + factory.createContinuePaginatedPlan(query, Optional.empty(), Optional.of(explainListener)); assertTrue(queryExecution instanceof ExplainPlan); } + @Test + public void createFromCursorShouldSuccess() { + AbstractPlan queryExecution = factory.createContinuePaginatedPlan("", false, + queryListener, explainListener); + AbstractPlan explainExecution = factory.createContinuePaginatedPlan("", true, + queryListener, explainListener); + assertAll( + () -> assertTrue(queryExecution instanceof ContinuePaginatedPlan), + () -> assertTrue(explainExecution instanceof ExplainPlan) + ); + } + @Test public void createFromQueryWithoutQueryListenerShouldThrowException() { - Statement query = new Query(plan); + Statement query = new Query(plan, 0); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, () -> factory.create(query, - Optional.empty(), Optional.empty())); + assertThrows(IllegalArgumentException.class, () -> factory.createContinuePaginatedPlan( + query, Optional.empty(), Optional.empty())); assertEquals("[BUG] query listener must be not null", exception.getMessage()); } @Test public void createFromExplainWithoutExplainListenerShouldThrowException() { - Statement query = new Explain(new Query(plan)); + Statement query = new Explain(new Query(plan, 0)); IllegalArgumentException exception = - assertThrows(IllegalArgumentException.class, () -> factory.create(query, - Optional.empty(), Optional.empty())); + assertThrows(IllegalArgumentException.class, () -> factory.createContinuePaginatedPlan( + query, Optional.empty(), Optional.empty())); assertEquals("[BUG] explain listener must be not null", exception.getMessage()); } @@ -104,4 +122,24 @@ public void noConsumerResponseChannel() { assertEquals( "[BUG] exception response should not sent to unexpected channel", exception.getMessage()); } + + @Test + public void createQueryWithFetchSizeWhichCanBePaged() { + when(paginatedPlanCache.canConvertToCursor(plan)).thenReturn(true); + factory = new QueryPlanFactory(queryService, paginatedPlanCache); + Statement query = new Query(plan, 10); + AbstractPlan queryExecution = + factory.createContinuePaginatedPlan(query, Optional.of(queryListener), Optional.empty()); + assertTrue(queryExecution instanceof PaginatedPlan); + } + + @Test + public void createQueryWithFetchSizeWhichCannotBePaged() { + when(paginatedPlanCache.canConvertToCursor(plan)).thenReturn(false); + factory = new QueryPlanFactory(queryService, paginatedPlanCache); + Statement query = new Query(plan, 10); + assertThrows(UnsupportedCursorRequestException.class, + () -> factory.createContinuePaginatedPlan(query, + Optional.of(queryListener), Optional.empty())); + } } diff --git a/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java b/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java new file mode 100644 index 0000000000..02a0dbc05e --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/executor/pagination/CanPaginateVisitorTest.java @@ -0,0 +1,132 @@ +/* + * Copyright 
OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.pagination; + +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; + +import java.util.List; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.ast.dsl.AstDSL; +import org.opensearch.sql.ast.tree.Project; +import org.opensearch.sql.ast.tree.Relation; +import org.opensearch.sql.executor.pagination.CanPaginateVisitor; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class CanPaginateVisitorTest { + + static final CanPaginateVisitor visitor = new CanPaginateVisitor(); + + @Test + // select * from y + public void accept_query_with_select_star_and_from() { + var plan = AstDSL.project(AstDSL.relation("dummy"), AstDSL.allFields()); + assertTrue(plan.accept(visitor, null)); + } + + @Test + // select x from y + public void reject_query_with_select_field_and_from() { + var plan = AstDSL.project(AstDSL.relation("dummy"), AstDSL.field("pewpew")); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select x,z from y + public void reject_query_with_select_fields_and_from() { + var plan = AstDSL.project(AstDSL.relation("dummy"), + AstDSL.field("pewpew"), AstDSL.field("pewpew")); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select x + public void reject_query_without_from() { + var plan = AstDSL.project(AstDSL.values(List.of(AstDSL.intLiteral(1))), + AstDSL.alias("1",AstDSL.intLiteral(1))); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select * from y limit z + public void reject_query_with_limit() { + var plan = AstDSL.project(AstDSL.limit(AstDSL.relation("dummy"), 1, 2), AstDSL.allFields()); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select * from y where z + public void reject_query_with_where() { + var plan = AstDSL.project(AstDSL.filter(AstDSL.relation("dummy"), + AstDSL.booleanLiteral(true)), AstDSL.allFields()); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select * from y order by z + public void reject_query_with_order_by() { + var plan = AstDSL.project(AstDSL.sort(AstDSL.relation("dummy"), AstDSL.field("1")), + AstDSL.allFields()); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select * from y group by z + public void reject_query_with_group_by() { + var plan = AstDSL.project(AstDSL.agg( + AstDSL.relation("dummy"), List.of(), List.of(), List.of(AstDSL.field("1")), List.of()), + AstDSL.allFields()); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select agg(x) from y + public void reject_query_with_aggregation_function() { + var plan = AstDSL.project(AstDSL.agg( + AstDSL.relation("dummy"), + List.of(AstDSL.alias("agg", AstDSL.aggregate("func", AstDSL.field("pewpew")))), + List.of(), List.of(), List.of()), + AstDSL.allFields()); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select window(x) from y + public void reject_query_with_window_function() { + var plan = AstDSL.project(AstDSL.relation("dummy"), + AstDSL.alias("pewpew", + AstDSL.window( + AstDSL.aggregate("func", AstDSL.field("pewpew")), + List.of(AstDSL.qualifiedName("1")), List.of()))); + assertFalse(plan.accept(visitor, null)); + } + + @Test + // select * from y, z + 
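+ // (a Project over more than one relation cannot be paginated, so the visitor must reject it)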
public void reject_query_with_select_from_multiple_indices() { + var plan = mock(Project.class); + when(plan.getChild()).thenReturn(List.of(AstDSL.relation("dummy"), AstDSL.relation("pummy"))); + when(plan.getProjectList()).thenReturn(List.of(AstDSL.allFields())); + assertFalse(visitor.visitProject(plan, null)); + } + + @Test + // unreal case, added for coverage only + public void reject_project_when_relation_has_child() { + var relation = mock(Relation.class, withSettings().useConstructor(AstDSL.qualifiedName("42"))); + when(relation.getChild()).thenReturn(List.of(AstDSL.relation("pewpew"))); + when(relation.accept(visitor, null)).thenCallRealMethod(); + var plan = mock(Project.class); + when(plan.getChild()).thenReturn(List.of(relation)); + when(plan.getProjectList()).thenReturn(List.of(AstDSL.allFields())); + assertFalse(visitor.visitProject((Project) plan, null)); + } +} diff --git a/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java b/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java new file mode 100644 index 0000000000..ff5e0d37a7 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/executor/pagination/CursorTest.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.pagination; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.executor.pagination.Cursor; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +class CursorTest { + + @Test + void empty_array_is_none() { + Assertions.assertEquals(Cursor.None, new Cursor(new byte[]{})); + } + + @Test + void toString_is_array_value() { + String cursorTxt = "This is a test"; + Assertions.assertEquals(cursorTxt, new Cursor(cursorTxt.getBytes()).toString()); + } +} diff --git a/core/src/test/java/org/opensearch/sql/executor/pagination/PaginatedPlanCacheTest.java b/core/src/test/java/org/opensearch/sql/executor/pagination/PaginatedPlanCacheTest.java new file mode 100644 index 0000000000..c3feb6e606 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/executor/pagination/PaginatedPlanCacheTest.java @@ -0,0 +1,459 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.executor.pagination; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.Map; +import java.util.stream.Stream; +import java.util.zip.GZIPOutputStream; +import lombok.SneakyThrows; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mockito; +import org.opensearch.sql.ast.dsl.AstDSL; +import 
org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.planner.physical.PaginateOperator; +import org.opensearch.sql.storage.StorageEngine; +import org.opensearch.sql.storage.TableScanOperator; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class PaginatedPlanCacheTest { + + StorageEngine storageEngine; + + PaginatedPlanCache planCache; + + // encoded query 'select * from cacls' o_O + static final String testCursor = "(Paginate,1,2,(Project," + + "(namedParseExpressions,),(projectList,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5" + + "OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVk" + + "dAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZ" + + "y5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH" + + "4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHl" + + "wZS9FeHByVHlwZTt4cHQABWJvb2wzc3IAGmphdmEudXRpbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFh" + + "dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAf" + + "gAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5cGUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS" + + "5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAHQk9PTEVBTnEAfgAI,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZX" + + "hwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAA" + + "JZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgAB" + + "eHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA" + + "0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3" + + "FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABGludDBzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYg" + + "G0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAA" + + "eHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAA" + + "HhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAdJTlRFR0VScQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlY" + + "XJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy" + + "9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAA" + + "EbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274" + + "AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZ" + + "W5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABXRpbWUxc3IAGmphdmEudXRpbC5BcnJheXMkQXJyYX" + + "lMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2YS5sYW5nLlN0cmluZzu" + + "t0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5cGUuRXhwckNvcmVUeXBl" + + "AAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAJVElNRVNUQU1QcQB+AAg=,rO0ABXNy" + + "AC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzd" + + "AASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0" + + "V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5" + + "jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5" + + "cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABWJvb2wyc3IAGmphdmEudXRpb" + + "C5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2YS" + + 
"5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5cGU" + + "uRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAHQk9PTEVBTnEA" + + "fgAI,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIA" + + "A0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9le" + + "HByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW" + + "9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9" + + "MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABGludDJzcgAa" + + "amF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1c" + + "gATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLm" + + "RhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAd" + + "JTlRFR0VScQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb27" + + "4hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2Vh" + + "cmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxb" + + "C5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTG" + + "phdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQ" + + "ABGludDFzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9P" + + "YmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZ" + + "WFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAA" + + "AAEgAAeHB0AAdJTlRFR0VScQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZE" + + "V4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9" + + "yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVu" + + "c2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwAB" + + "XBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeH" + + "ByVHlwZTt4cHQABHN0cjNzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGp" + + "hdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgAp" + + "b3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuR" + + "W51bQAAAAAAAAAAEgAAeHB0AAZTVFJJTkdxAH4ACA==,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc" + + "2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZW" + + "dhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3I" + + "AMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0" + + "dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2Rhd" + + "GEvdHlwZS9FeHByVHlwZTt4cHQABGludDNzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAV" + + "sAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAA" + + "BcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5q" + + "YXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAdJTlRFR0VScQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5z" + + "cWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpb" + + "mc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZX" + + 
"EAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWv" + + "MkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFy" + + "Y2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABHN0cjFzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZp" + + "Dy+zYgG0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHX" + + "tHAgAAeHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAA" + + "AABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAZTVFJJTkdxAH4ACA==,rO0ABXNyAC1vcmcub3B" + + "lbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEv" + + "bGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb" + + "247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3" + + "Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3J" + + "nL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABHN0cjJzcgAaamF2YS51dGlsLkFycmF5cyRB" + + "cnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3Rya" + + "W5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZV" + + "R5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAZTVFJJTkdxAH4ACA==,rO0ABX" + + "NyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWF" + + "zdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9u" + + "L0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZ" + + "W5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABH" + + "R5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABXRpbWUwc3IAGmphdmEudXR" + + "pbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2" + + "YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5c" + + "GUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAJVElNRVNUQU" + + "1QcQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q" + + "2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3N" + + "xbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHBy" + + "ZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvd" + + "XRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQACWRhdG" + + "V0aW1lMHNyABpqYXZhLnV0aWwuQXJyYXlzJEFycmF5TGlzdNmkPL7NiAbSAgABWwABYXQAE1tMamF2YS9sYW5nL09" + + "iamVjdDt4cHVyABNbTGphdmEubGFuZy5TdHJpbmc7rdJW5+kde0cCAAB4cAAAAAFxAH4ACH5yAClvcmcub3BlbnNl" + + "YXJjaC5zcWwuZGF0YS50eXBlLkV4cHJDb3JlVHlwZQAAAAAAAAAAEgAAeHIADmphdmEubGFuZy5FbnVtAAAAAAAAA" + + "AASAAB4cHQACVRJTUVTVEFNUHEAfgAI,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZ" + + "EV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG" + + "9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGV" + + "uc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwA" + + "BXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9Fe" + + "HByVHlwZTt4cHQABG51bTFzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTG" + + "phdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgA" + + 
"pb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcu" + + "RW51bQAAAAAAAAAAEgAAeHB0AAZET1VCTEVxAH4ACA==,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVz" + + "c2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZ" + + "WdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3" + + "IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF" + + "0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2Rh" + + "dGEvdHlwZS9FeHByVHlwZTt4cHQABG51bTBzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAA" + + "VsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAA" + + "ABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5" + + "qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAZET1VCTEVxAH4ACA==,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5" + + "zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJp" + + "bmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZ" + + "XEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxW" + + "vMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWF" + + "yY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQACWRhdGV0aW1lMXNyABpqYXZhLnV0aWwuQXJyYXlzJEFycmF5" + + "TGlzdNmkPL7NiAbSAgABWwABYXQAE1tMamF2YS9sYW5nL09iamVjdDt4cHVyABNbTGphdmEubGFuZy5TdHJpbmc7r" + + "dJW5+kde0cCAAB4cAAAAAFxAH4ACH5yAClvcmcub3BlbnNlYXJjaC5zcWwuZGF0YS50eXBlLkV4cHJDb3JlVHlwZQ" + + "AAAAAAAAAAEgAAeHIADmphdmEubGFuZy5FbnVtAAAAAAAAAAASAAB4cHQACVRJTUVTVEFNUHEAfgAI,rO0ABXNyAC" + + "1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAA" + + "STGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4" + + "cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZ" + + "UV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cG" + + "V0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABG51bTRzcgAaamF2YS51dGlsLkF" + + "ycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxh" + + "bmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5Fe" + + "HByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAZET1VCTEVxAH4ACA" + + "==,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0" + + "wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHB" + + "yZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9u" + + "LlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9Ma" + + "XN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABWJvb2wxc3IAGm" + + "phdmEudXRpbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXI" + + "AE1tMamF2YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5k" + + "YXRhLnR5cGUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAHQ" + + "k9PTEVBTnEAfgAI,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274h" + + "hKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2Vhcm" + + "NoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5" + + 
"leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGph" + + "dmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQAA" + + "2tleXNyABpqYXZhLnV0aWwuQXJyYXlzJEFycmF5TGlzdNmkPL7NiAbSAgABWwABYXQAE1tMamF2YS9sYW5nL09iam" + + "VjdDt4cHVyABNbTGphdmEubGFuZy5TdHJpbmc7rdJW5+kde0cCAAB4cAAAAAFxAH4ACH5yAClvcmcub3BlbnNlYXJ" + + "jaC5zcWwuZGF0YS50eXBlLkV4cHJDb3JlVHlwZQAAAAAAAAAAEgAAeHIADmphdmEubGFuZy5FbnVtAAAAAAAAAAAS" + + "AAB4cHQABlNUUklOR3EAfgAI,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJl" + + "c3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vc" + + "GVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2Vhcm" + + "NoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGh" + + "zdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlw" + + "ZTt4cHQABG51bTNzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGphdmEvb" + + "GFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAAeHAAAAABcQB+AAh+cgApb3JnLm" + + "9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQA" + + "AAAAAAAAAEgAAeHB0AAZET1VCTEVxAH4ACA==,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5" + + "OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVk" + + "dAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZ" + + "y5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH" + + "4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHl" + + "wZS9FeHByVHlwZTt4cHQABWJvb2wwc3IAGmphdmEudXRpbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFh" + + "dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAf" + + "gAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5cGUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS" + + "5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAHQk9PTEVBTnEAfgAI,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZX" + + "hwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAA" + + "JZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgAB" + + "eHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA" + + "0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3" + + "FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABG51bTJzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheUxpc3TZpDy+zYg" + + "G0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63SVufpHXtHAgAA" + + "eHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUAAAAAAAAAABIAA" + + "HhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAZET1VCTEVxAH4ACA==,rO0ABXNyAC1vcmcub3BlbnNlY" + + "XJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy" + + "9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAA" + + "EbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274" + + "AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZ" + + "W5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABHN0cjBzcgAaamF2YS51dGlsLkFycmF5cyRBcnJheU" + + "xpc3TZpDy+zYgG0gIAAVsAAWF0ABNbTGphdmEvbGFuZy9PYmplY3Q7eHB1cgATW0xqYXZhLmxhbmcuU3RyaW5nO63" + + "SVufpHXtHAgAAeHAAAAABcQB+AAh+cgApb3JnLm9wZW5zZWFyY2guc3FsLmRhdGEudHlwZS5FeHByQ29yZVR5cGUA" + + 
"AAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAZTVFJJTkdxAH4ACA==,rO0ABXNyAC1v" + + "cmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAAST" + + "GphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cH" + + "Jlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV" + + "4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0" + + "ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABWRhdGUzc3IAGmphdmEudXRpbC5Bc" + + "nJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2YS5sYW" + + "5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5cGUuRXh" + + "wckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAJVElNRVNUQU1QcQB+" + + "AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIA" + + "A0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9le" + + "HByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW" + + "9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9" + + "MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQABWRhdGUyc3IA" + + "GmphdmEudXRpbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwd" + + "XIAE1tMamF2YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC" + + "5kYXRhLnR5cGUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAA" + + "JVElNRVNUQU1QcQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3N" + + "pb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVu" + + "c2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoL" + + "nNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdA" + + "AQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt" + + "4cHQABWRhdGUxc3IAGmphdmEudXRpbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAFhdAATW0xqYXZhL2xh" + + "bmcvT2JqZWN0O3hwdXIAE1tMamF2YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEAfgAIfnIAKW9yZy5vc" + + "GVuc2VhcmNoLnNxbC5kYXRhLnR5cGUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAA" + + "AAAAAAABIAAHhwdAAJVElNRVNUQU1QcQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zcWwuZXhwcmVzc2lvbi" + + "5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbmc7TAAJZGVsZWdhdGV" + + "kdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXEAfgABeHBwc3IAMW9y" + + "Zy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvMkAIAA0wABGF0dHJxA" + + "H4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY2gvc3FsL2RhdGEvdH" + + "lwZS9FeHByVHlwZTt4cHQABWRhdGUwc3IAGmphdmEudXRpbC5BcnJheXMkQXJyYXlMaXN02aQ8vs2IBtICAAFbAAF" + + "hdAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwdXIAE1tMamF2YS5sYW5nLlN0cmluZzut0lbn6R17RwIAAHhwAAAAAXEA" + + "fgAIfnIAKW9yZy5vcGVuc2VhcmNoLnNxbC5kYXRhLnR5cGUuRXhwckNvcmVUeXBlAAAAAAAAAAASAAB4cgAOamF2Y" + + "S5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAJVElNRVNUQU1QcQB+AAg=,rO0ABXNyAC1vcmcub3BlbnNlYXJjaC5zc" + + "WwuZXhwcmVzc2lvbi5OYW1lZEV4cHJlc3Npb274hhKW/q2YQQIAA0wABWFsaWFzdAASTGphdmEvbGFuZy9TdHJpbm" + + "c7TAAJZGVsZWdhdGVkdAAqTG9yZy9vcGVuc2VhcmNoL3NxbC9leHByZXNzaW9uL0V4cHJlc3Npb247TAAEbmFtZXE" + + "AfgABeHBwc3IAMW9yZy5vcGVuc2VhcmNoLnNxbC5leHByZXNzaW9uLlJlZmVyZW5jZUV4cHJlc3Npb274AO0rxWvM" + + 
"kAIAA0wABGF0dHJxAH4AAUwABXBhdGhzdAAQTGphdmEvdXRpbC9MaXN0O0wABHR5cGV0ACdMb3JnL29wZW5zZWFyY" + + "2gvc3FsL2RhdGEvdHlwZS9FeHByVHlwZTt4cHQAA3p6enNyABpqYXZhLnV0aWwuQXJyYXlzJEFycmF5TGlzdNmkPL" + + "7NiAbSAgABWwABYXQAE1tMamF2YS9sYW5nL09iamVjdDt4cHVyABNbTGphdmEubGFuZy5TdHJpbmc7rdJW5+kde0c" + + "CAAB4cAAAAAFxAH4ACH5yAClvcmcub3BlbnNlYXJjaC5zcWwuZGF0YS50eXBlLkV4cHJDb3JlVHlwZQAAAAAAAAAA" + + "EgAAeHIADmphdmEubGFuZy5FbnVtAAAAAAAAAAASAAB4cHQABlNUUklOR3EAfgAI),(OpenSearchPagedIndexSc" + + "an,calcs,FGluY2x1ZGVfY29udGV4dF91dWlkDXF1ZXJ5QW5kRmV0Y2gBFndYQmJZcHpxU3dtc1hUVkhhYU1uLVEA" + + "AAAAAAAADRY4RzRudHZqbFI0dTBFdkJNZEpCaDd3)))"; + + private static final String testIndexName = "dummyIndex"; + private static final String testScroll = "dummyScroll"; + + @BeforeEach + void setUp() { + storageEngine = mock(StorageEngine.class); + when(storageEngine.getTableScan(anyString(), anyString())) + .thenReturn(new MockedTableScanOperator()); + planCache = new PaginatedPlanCache(storageEngine); + } + + @Test + void canConvertToCursor_relation() { + assertTrue(planCache.canConvertToCursor(AstDSL.relation("Table"))); + } + + @Test + void canConvertToCursor_project_allFields_relation() { + var unresolvedPlan = AstDSL.project(AstDSL.relation("table"), AstDSL.allFields()); + assertTrue(planCache.canConvertToCursor(unresolvedPlan)); + } + + @Test + void canConvertToCursor_project_some_fields_relation() { + var unresolvedPlan = AstDSL.project(AstDSL.relation("table"), AstDSL.field("rando")); + Assertions.assertFalse(planCache.canConvertToCursor(unresolvedPlan)); + } + + @ParameterizedTest + @ValueSource(strings = {"pewpew", "asdkfhashdfjkgakgfwuigfaijkb", testCursor}) + void compress_decompress(String input) { + var compressed = compress(input); + assertEquals(input, decompress(compressed)); + if (input.length() > 200) { + // Compression of short strings isn't profitable, because encoding into string and gzip + // headers add more bytes than input string has. + assertTrue(compressed.length() < input.length()); + } + } + + @Test + // should never happen actually, at least for compress + void compress_decompress_null_or_empty_string() { + assertAll( + () -> assertTrue(compress(null).isEmpty()), + () -> assertTrue(compress("").isEmpty()), + () -> assertTrue(decompress(null).isEmpty()), + () -> assertTrue(decompress("").isEmpty()) + ); + } + + @Test + // test added for coverage only + void compress_throws() { + var mock = Mockito.mockConstructionWithAnswer(GZIPOutputStream.class, invocation -> null); + assertThrows(Throwable.class, () -> compress("\\_(`v`)_/")); + mock.close(); + } + + @Test + void decompress_throws() { + assertAll( + // from gzip - damaged header + () -> assertThrows(Throwable.class, () -> decompress("00")), + // from HashCode::fromString + () -> assertThrows(Throwable.class, () -> decompress("000")) + ); + } + + @Test + @SneakyThrows + void convert_deconvert_cursor() { + var cursor = buildCursor(Map.of()); + var plan = planCache.convertToPlan(cursor); + // `PaginateOperator::toCursor` shifts cursor to the next page. To have this test consistent + // we have to enforce it staying on the same page. This allows us to get same cursor strings. + var pageNum = (int)FieldUtils.readField(plan, "pageIndex", true); + FieldUtils.writeField(plan, "pageIndex", pageNum - 1, true); + var convertedCursor = planCache.convertToCursor(plan).toString(); + // Then we have to restore page num into the plan, otherwise comparison would fail due to this. 
+ FieldUtils.writeField(plan, "pageIndex", pageNum, true); + var convertedPlan = planCache.convertToPlan(convertedCursor); + assertEquals(cursor, convertedCursor); + // TODO compare plans + } + + @Test + @SneakyThrows + void convertToCursor_cant_convert() { + var plan = mock(MockedTableScanOperator.class); + assertEquals(Cursor.None, planCache.convertToCursor(plan)); + when(plan.toCursor()).thenReturn(""); + assertEquals(Cursor.None, planCache.convertToCursor( + new PaginateOperator(plan, 1, 2))); + } + + @Test + void converted_plan_is_executable() { + // planCache.convertToPlan(buildCursor(Map.of())); + var plan = planCache.convertToPlan("n:" + compress(testCursor)); + // TODO + } + + @ParameterizedTest + @MethodSource("generateIncorrectCursors") + void throws_on_parsing_damaged_cursor(String cursor) { + assertThrows(Throwable.class, () -> planCache.convertToPlan(cursor)); + } + + private static Stream generateIncorrectCursors() { + return Stream.of( + compress(testCursor), // a valid cursor, but without "n:" prefix + "n:" + testCursor, // a valid, but uncompressed cursor + buildCursor(Map.of("prefix", "g:")), // incorrect prefix + buildCursor(Map.of("header: paginate", "ORDER BY")), // incorrect header + buildCursor(Map.of("pageIndex", "")), // incorrect page # + buildCursor(Map.of("pageIndex", "abc")), // incorrect page # + buildCursor(Map.of("pageSize", "abc")), // incorrect page size + buildCursor(Map.of("pageSize", "null")), // incorrect page size + buildCursor(Map.of("pageSize", "10 ")), // incorrect page size + buildCursor(Map.of("header: project", "")), // incorrect header + buildCursor(Map.of("header: namedParseExpressions", "ololo")), // incorrect header + buildCursor(Map.of("namedParseExpressions", "pewpew")), // incorrect (unparsable) npes + buildCursor(Map.of("namedParseExpressions", "rO0ABXA=,")), // incorrect npes (extra comma) + buildCursor(Map.of("header: projectList", "")), // incorrect header + buildCursor(Map.of("projectList", "\0\0\0\0")), // incorrect project + buildCursor(Map.of("header: OpenSearchPagedIndexScan", "42")) // incorrect header + ).map(Arguments::of); + } + + + /** + * Function puts default valid values into generated cursor string. + * Values could be redefined. + * @param values A map of non-default values to use. + * @return A compressed cursor string. 
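+ * Example (illustrative, using the defaults above): buildCursor(Map.of()) serializes
+ * "(Paginate,1,2,(Project,(namedParseExpressions,),(projectList,rO0ABXA=),
+ * (OpenSearchPagedIndexScan,dummyIndex,dummyScroll)))" and returns it compressed and
+ * prefixed with "n:".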
+ */ + public static String buildCursor(Map values) { + String prefix = values.getOrDefault("prefix", "n:"); + String headerPaginate = values.getOrDefault("header: paginate", "Paginate"); + String pageIndex = values.getOrDefault("pageIndex", "1"); + String pageSize = values.getOrDefault("pageSize", "2"); + String headerProject = values.getOrDefault("header: project", "Project"); + String headerNpes = values.getOrDefault("header: namedParseExpressions", + "namedParseExpressions"); + String namedParseExpressions = values.getOrDefault("namedParseExpressions", ""); + String headerProjectList = values.getOrDefault("header: projectList", "projectList"); + String projectList = values.getOrDefault("projectList", "rO0ABXA="); // serialized `null` + String headerOspis = values.getOrDefault("header: OpenSearchPagedIndexScan", + "OpenSearchPagedIndexScan"); + String indexName = values.getOrDefault("indexName", testIndexName); + String scrollId = values.getOrDefault("scrollId", testScroll); + var cursor = String.format("(%s,%s,%s,(%s,(%s,%s),(%s,%s),(%s,%s,%s)))", headerPaginate, + pageIndex, pageSize, headerProject, headerNpes, namedParseExpressions, headerProjectList, + projectList, headerOspis, indexName, scrollId); + return prefix + compress(cursor); + } + + private static class MockedTableScanOperator extends TableScanOperator { + @Override + public boolean hasNext() { + return false; + } + + @Override + public ExprValue next() { + return null; + } + + @Override + public String explain() { + return null; + } + + @Override + public String toCursor() { + return createSection("OpenSearchPagedIndexScan", testIndexName, testScroll); + } + } + + @SneakyThrows + private static String compress(String input) { + return new PaginatedPlanCache(null).compress(input); + } + + @SneakyThrows + private static String decompress(String input) { + return new PaginatedPlanCache(null).decompress(input); + } +} diff --git a/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java b/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java index 1a2b6e3f2a..ceb53b756a 100644 --- a/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java +++ b/core/src/test/java/org/opensearch/sql/executor/streaming/MicroBatchStreamingExecutionTest.java @@ -26,6 +26,7 @@ import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.PlanContext; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.storage.split.Split; @@ -169,7 +170,8 @@ Helper executeSuccess(Long... 
offsets) { ResponseListener listener = invocation.getArgument(2); listener.onResponse( - new ExecutionEngine.QueryResponse(null, Collections.emptyList())); + new ExecutionEngine.QueryResponse(null, Collections.emptyList(), 0, + Cursor.None)); PlanContext planContext = invocation.getArgument(1); assertTrue(planContext.getSplit().isPresent()); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java b/core/src/test/java/org/opensearch/sql/expression/serialization/DefaultExpressionSerializerTest.java similarity index 94% rename from opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java rename to core/src/test/java/org/opensearch/sql/expression/serialization/DefaultExpressionSerializerTest.java index 72a319dbfe..53a89d5421 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/serialization/DefaultExpressionSerializerTest.java +++ b/core/src/test/java/org/opensearch/sql/expression/serialization/DefaultExpressionSerializerTest.java @@ -21,6 +21,8 @@ import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.ExpressionNodeVisitor; import org.opensearch.sql.expression.env.Environment; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class DefaultExpressionSerializerTest { diff --git a/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java b/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java index 017cfb60ea..da3f5315e4 100644 --- a/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/DefaultImplementorTest.java @@ -33,6 +33,8 @@ import java.util.Map; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -50,9 +52,11 @@ import org.opensearch.sql.expression.aggregation.NamedAggregator; import org.opensearch.sql.expression.window.WindowDefinition; import org.opensearch.sql.expression.window.ranking.RowNumberFunction; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanDSL; import org.opensearch.sql.planner.logical.LogicalRelation; +import org.opensearch.sql.planner.physical.PaginateOperator; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanDSL; import org.opensearch.sql.storage.Table; @@ -62,24 +66,16 @@ import org.opensearch.sql.storage.write.TableWriteOperator; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class DefaultImplementorTest { - @Mock - private Expression filter; - - @Mock - private NamedAggregator aggregator; - - @Mock - private NamedExpression groupBy; - @Mock private Table table; private final DefaultImplementor implementor = new DefaultImplementor<>(); @Test - public void visitShouldReturnDefaultPhysicalOperator() { + public void visit_should_return_default_physical_operator() { String indexName = "test"; NamedExpression include = 
named("age", ref("age", INTEGER)); ReferenceExpression exclude = ref("name", STRING); @@ -157,14 +153,14 @@ public void visitShouldReturnDefaultPhysicalOperator() { } @Test - public void visitRelationShouldThrowException() { + public void visitRelation_should_throw_an_exception() { assertThrows(UnsupportedOperationException.class, () -> new LogicalRelation("test", table).accept(implementor, null)); } @SuppressWarnings({"rawtypes", "unchecked"}) @Test - public void visitWindowOperatorShouldReturnPhysicalWindowOperator() { + public void visitWindowOperator_should_return_PhysicalWindowOperator() { NamedExpression windowFunction = named(new RowNumberFunction()); WindowDefinition windowDefinition = new WindowDefinition( Collections.singletonList(ref("state", STRING)), @@ -204,7 +200,7 @@ public void visitWindowOperatorShouldReturnPhysicalWindowOperator() { } @Test - public void visitTableScanBuilderShouldBuildTableScanOperator() { + public void visitTableScanBuilder_should_build_TableScanOperator() { TableScanOperator tableScanOperator = Mockito.mock(TableScanOperator.class); TableScanBuilder tableScanBuilder = new TableScanBuilder() { @Override @@ -216,7 +212,7 @@ public TableScanOperator build() { } @Test - public void visitTableWriteBuilderShouldBuildTableWriteOperator() { + public void visitTableWriteBuilder_should_build_TableWriteOperator() { LogicalPlan child = values(); TableWriteOperator tableWriteOperator = Mockito.mock(TableWriteOperator.class); TableWriteBuilder logicalPlan = new TableWriteBuilder(child) { @@ -227,4 +223,11 @@ public TableWriteOperator build(PhysicalPlan child) { }; assertEquals(tableWriteOperator, logicalPlan.accept(implementor, null)); } + + @Test + public void visitPaginate_should_build_PaginateOperator_and_keep_page_size() { + var paginate = new LogicalPaginate(42, List.of(values())); + var plan = paginate.accept(implementor, null); + assertEquals(paginate.getPageSize(), ((PaginateOperator) plan).getPageSize()); + } } diff --git a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java index 341bcbc29e..c9d74fa871 100644 --- a/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/logical/LogicalPlanNodeVisitorTest.java @@ -8,21 +8,23 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.mock; import static org.opensearch.sql.expression.DSL.named; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.Collections; -import java.util.HashMap; +import java.util.List; import java.util.Map; -import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import org.opensearch.sql.ast.expression.DataType; -import org.opensearch.sql.ast.expression.Literal; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import 
org.opensearch.sql.ast.tree.RareTopN.CommandType; import org.opensearch.sql.ast.tree.Sort.SortOption; import org.opensearch.sql.data.model.ExprValueUtils; @@ -42,20 +44,24 @@ /** * Todo. Temporary added for UT coverage, Will be removed. */ -@ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class LogicalPlanNodeVisitorTest { - @Mock - Expression expression; - @Mock - ReferenceExpression ref; - @Mock - Aggregator aggregator; - @Mock - Table table; + static Expression expression; + static ReferenceExpression ref; + static Aggregator aggregator; + static Table table; + + @BeforeAll + private static void initMocks() { + expression = mock(Expression.class); + ref = mock(ReferenceExpression.class); + aggregator = mock(Aggregator.class); + table = mock(Table.class); + } @Test - public void logicalPlanShouldTraversable() { + public void logical_plan_should_be_traversable() { LogicalPlan logicalPlan = LogicalPlanDSL.rename( LogicalPlanDSL.aggregation( @@ -72,119 +78,57 @@ public void logicalPlanShouldTraversable() { assertEquals(5, result); } - @Test - public void testAbstractPlanNodeVisitorShouldReturnNull() { + @SuppressWarnings("unchecked") + private static Stream getLogicalPlansForVisitorTest() { LogicalPlan relation = LogicalPlanDSL.relation("schema", table); - assertNull(relation.accept(new LogicalPlanNodeVisitor() { - }, null)); - LogicalPlan tableScanBuilder = new TableScanBuilder() { @Override public TableScanOperator build() { return null; } }; - assertNull(tableScanBuilder.accept(new LogicalPlanNodeVisitor() { - }, null)); - - LogicalPlan write = LogicalPlanDSL.write(null, table, Collections.emptyList()); - assertNull(write.accept(new LogicalPlanNodeVisitor() { - }, null)); - TableWriteBuilder tableWriteBuilder = new TableWriteBuilder(null) { @Override public TableWriteOperator build(PhysicalPlan child) { return null; } }; - assertNull(tableWriteBuilder.accept(new LogicalPlanNodeVisitor() { - }, null)); - + LogicalPlan write = LogicalPlanDSL.write(null, table, Collections.emptyList()); LogicalPlan filter = LogicalPlanDSL.filter(relation, expression); - assertNull(filter.accept(new LogicalPlanNodeVisitor() { - }, null)); - - LogicalPlan aggregation = - LogicalPlanDSL.aggregation( - filter, ImmutableList.of(DSL.named("avg", aggregator)), ImmutableList.of(DSL.named( - "group", expression))); - assertNull(aggregation.accept(new LogicalPlanNodeVisitor() { - }, null)); - + LogicalPlan aggregation = LogicalPlanDSL.aggregation( + filter, ImmutableList.of(DSL.named("avg", aggregator)), ImmutableList.of(DSL.named( + "group", expression))); LogicalPlan rename = LogicalPlanDSL.rename(aggregation, ImmutableMap.of(ref, ref)); - assertNull(rename.accept(new LogicalPlanNodeVisitor() { - }, null)); - LogicalPlan project = LogicalPlanDSL.project(relation, named("ref", ref)); - assertNull(project.accept(new LogicalPlanNodeVisitor() { - }, null)); - LogicalPlan remove = LogicalPlanDSL.remove(relation, ref); - assertNull(remove.accept(new LogicalPlanNodeVisitor() { - }, null)); - LogicalPlan eval = LogicalPlanDSL.eval(relation, Pair.of(ref, expression)); - assertNull(eval.accept(new LogicalPlanNodeVisitor() { - }, null)); - - LogicalPlan sort = LogicalPlanDSL.sort(relation, - Pair.of(SortOption.DEFAULT_ASC, expression)); - assertNull(sort.accept(new LogicalPlanNodeVisitor() { - }, null)); - + LogicalPlan sort = LogicalPlanDSL.sort(relation, Pair.of(SortOption.DEFAULT_ASC, expression)); LogicalPlan dedup = LogicalPlanDSL.dedupe(relation, 1, false, false, 
expression); - assertNull(dedup.accept(new LogicalPlanNodeVisitor() { - }, null)); - LogicalPlan window = LogicalPlanDSL.window(relation, named(expression), new WindowDefinition( ImmutableList.of(ref), ImmutableList.of(Pair.of(SortOption.DEFAULT_ASC, expression)))); - assertNull(window.accept(new LogicalPlanNodeVisitor() { - }, null)); - LogicalPlan rareTopN = LogicalPlanDSL.rareTopN( relation, CommandType.TOP, ImmutableList.of(expression), expression); - assertNull(rareTopN.accept(new LogicalPlanNodeVisitor() { - }, null)); - - Map args = new HashMap<>(); LogicalPlan highlight = new LogicalHighlight(filter, - new LiteralExpression(ExprValueUtils.stringValue("fieldA")), args); - assertNull(highlight.accept(new LogicalPlanNodeVisitor() { - }, null)); - - LogicalPlan mlCommons = new LogicalMLCommons(LogicalPlanDSL.relation("schema", table), - "kmeans", - ImmutableMap.builder() - .put("centroids", new Literal(3, DataType.INTEGER)) - .put("iterations", new Literal(3, DataType.DOUBLE)) - .put("distance_type", new Literal(null, DataType.STRING)) - .build()); - assertNull(mlCommons.accept(new LogicalPlanNodeVisitor() { - }, null)); - - LogicalPlan ad = new LogicalAD(LogicalPlanDSL.relation("schema", table), - new HashMap() {{ - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal(null, DataType.STRING)); - } - }); - assertNull(ad.accept(new LogicalPlanNodeVisitor() { - }, null)); + new LiteralExpression(ExprValueUtils.stringValue("fieldA")), Map.of()); + LogicalPlan mlCommons = new LogicalMLCommons(relation, "kmeans", Map.of()); + LogicalPlan ad = new LogicalAD(relation, Map.of()); + LogicalPlan ml = new LogicalML(relation, Map.of()); + LogicalPlan paginate = new LogicalPaginate(42, List.of(relation)); + + return Stream.of( + relation, tableScanBuilder, write, tableWriteBuilder, filter, aggregation, rename, project, + remove, eval, sort, dedup, window, rareTopN, highlight, mlCommons, ad, ml, paginate + ).map(Arguments::of); + } - LogicalPlan ml = new LogicalML(LogicalPlanDSL.relation("schema", table), - new HashMap() {{ - put("action", new Literal("train", DataType.STRING)); - put("algorithm", new Literal("rcf", DataType.STRING)); - put("shingle_size", new Literal(8, DataType.INTEGER)); - put("time_decay", new Literal(0.0001, DataType.DOUBLE)); - put("time_field", new Literal(null, DataType.STRING)); - } - }); - assertNull(ml.accept(new LogicalPlanNodeVisitor() { + @ParameterizedTest + @MethodSource("getLogicalPlansForVisitorTest") + public void abstract_plan_node_visitor_should_return_null(LogicalPlan plan) { + assertNull(plan.accept(new LogicalPlanNodeVisitor() { }, null)); } + private static class NodesCount extends LogicalPlanNodeVisitor { @Override public Integer visitRelation(LogicalRelation plan, Object context) { @@ -195,32 +139,28 @@ public Integer visitRelation(LogicalRelation plan, Object context) { public Integer visitFilter(LogicalFilter plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)) - .collect(Collectors.summingInt(Integer::intValue)); + .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); } @Override public Integer visitAggregation(LogicalAggregation plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)) - .collect(Collectors.summingInt(Integer::intValue)); + .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); } @Override public 
Integer visitRename(LogicalRename plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)) - .collect(Collectors.summingInt(Integer::intValue)); + .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); } @Override public Integer visitRareTopN(LogicalRareTopN plan, Object context) { return 1 + plan.getChild().stream() - .map(child -> child.accept(this, context)) - .collect(Collectors.summingInt(Integer::intValue)); + .map(child -> child.accept(this, context)).mapToInt(Integer::intValue).sum(); } } } diff --git a/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java b/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java index 7516aa1809..1ee9b9aa3b 100644 --- a/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/optimizer/LogicalPlanOptimizerTest.java @@ -7,8 +7,11 @@ package org.opensearch.sql.planner.optimizer; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.model.ExprValueUtils.integerValue; import static org.opensearch.sql.data.model.ExprValueUtils.longValue; @@ -26,9 +29,12 @@ import com.google.common.collect.ImmutableList; import java.util.Collections; +import java.util.List; import java.util.Map; import org.apache.commons.lang3.tuple.Pair; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -38,13 +44,16 @@ import org.opensearch.sql.ast.tree.Sort; import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.DSL; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.logical.LogicalRelation; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.Table; import org.opensearch.sql.storage.read.TableScanBuilder; import org.opensearch.sql.storage.write.TableWriteBuilder; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class LogicalPlanOptimizerTest { @Mock @@ -55,7 +64,7 @@ class LogicalPlanOptimizerTest { @BeforeEach void setUp() { - when(table.createScanBuilder()).thenReturn(tableScanBuilder); + lenient().when(table.createScanBuilder()).thenReturn(tableScanBuilder); } /** @@ -255,7 +264,6 @@ void table_scan_builder_support_highlight_push_down_can_apply_its_rule() { @Test void table_not_support_scan_builder_should_not_be_impact() { - Mockito.reset(table, tableScanBuilder); Table table = new Table() { @Override public Map getFieldTypes() { @@ -276,7 +284,6 @@ public PhysicalPlan implement(LogicalPlan plan) { @Test void table_support_write_builder_should_be_replaced() { - Mockito.reset(table, tableScanBuilder); TableWriteBuilder writeBuilder = Mockito.mock(TableWriteBuilder.class); when(table.createWriteBuilder(any())).thenReturn(writeBuilder); @@ -288,7 +295,6 @@ void 
table_support_write_builder_should_be_replaced() { @Test void table_not_support_write_builder_should_report_error() { - Mockito.reset(table, tableScanBuilder); Table table = new Table() { @Override public Map getFieldTypes() { @@ -305,6 +311,52 @@ public PhysicalPlan implement(LogicalPlan plan) { () -> table.createWriteBuilder(null)); } + @Test + void paged_table_scan_builder_support_project_push_down_can_apply_its_rule() { + when(tableScanBuilder.pushDownProject(any())).thenReturn(true); + when(table.createPagedScanBuilder(anyInt())).thenReturn(tableScanBuilder); + + var relation = new LogicalRelation("schema", table); + relation.setPageSize(4); + + assertEquals( + tableScanBuilder, + LogicalPlanOptimizer.paginationCreate().optimize(project(relation)) + ); + } + + @Test + void push_page_size() { + var relation = new LogicalRelation("schema", table); + var paginate = new LogicalPaginate(42, List.of(project(relation))); + assertNull(relation.getPageSize()); + LogicalPlanOptimizer.paginationCreate().optimize(paginate); + assertEquals(42, relation.getPageSize()); + } + + @Test + void push_page_size_noop_if_no_relation() { + var paginate = new LogicalPaginate(42, List.of(project(values()))); + LogicalPlanOptimizer.paginationCreate().optimize(paginate); + } + + @Test + void push_page_size_noop_if_no_sub_plans() { + var paginate = new LogicalPaginate(42, List.of()); + LogicalPlanOptimizer.paginationCreate().optimize(paginate); + } + + @Test + void table_scan_builder_support_offset_push_down_can_apply_its_rule() { + when(table.createPagedScanBuilder(anyInt())).thenReturn(tableScanBuilder); + + var optimized = LogicalPlanOptimizer.paginationCreate() + .optimize(new LogicalPaginate(42, List.of(project(relation("schema", table))))); + // `optimized` structure: LogicalPaginate -> LogicalProject -> TableScanBuilder + // LogicalRelation replaced by a TableScanBuilder instance + assertEquals(tableScanBuilder, optimized.getChild().get(0).getChild().get(0)); + } + private LogicalPlan optimize(LogicalPlan plan) { final LogicalPlanOptimizer optimizer = LogicalPlanOptimizer.create(); final LogicalPlan optimize = optimizer.optimize(plan); diff --git a/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java b/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java index 9f90fd8d05..1fd572e7da 100644 --- a/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/optimizer/pattern/PatternsTest.java @@ -6,35 +6,49 @@ package org.opensearch.sql.planner.optimizer.pattern; +import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.Collections; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.sql.planner.logical.LogicalFilter; +import org.opensearch.sql.planner.logical.LogicalPaginate; import org.opensearch.sql.planner.logical.LogicalPlan; -@ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class PatternsTest { - @Mock - LogicalPlan plan; - @Test 
void source_is_empty() { + var plan = mock(LogicalPlan.class); when(plan.getChild()).thenReturn(Collections.emptyList()); - assertFalse(Patterns.source().getFunction().apply(plan).isPresent()); - assertFalse(Patterns.source(null).getProperty().getFunction().apply(plan).isPresent()); + assertAll( + () -> assertFalse(Patterns.source().getFunction().apply(plan).isPresent()), + () -> assertFalse(Patterns.source(null).getProperty().getFunction().apply(plan).isPresent()) + ); } @Test void table_is_empty() { - plan = Mockito.mock(LogicalFilter.class); - assertFalse(Patterns.table().getFunction().apply(plan).isPresent()); - assertFalse(Patterns.writeTable().getFunction().apply(plan).isPresent()); + var plan = mock(LogicalFilter.class); + assertAll( + () -> assertFalse(Patterns.table().getFunction().apply(plan).isPresent()), + () -> assertFalse(Patterns.writeTable().getFunction().apply(plan).isPresent()) + ); + } + + @Test + void pagination() { + assertAll( + () -> assertTrue(Patterns.pagination().getFunction() + .apply(mock(LogicalPaginate.class)).isPresent()), + () -> assertFalse(Patterns.pagination().getFunction() + .apply(mock(LogicalFilter.class)).isPresent()) + ); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java index 288b4bf661..f541f6a15f 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/FilterOperatorTest.java @@ -17,22 +17,30 @@ import com.google.common.collect.ImmutableMap; import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.AdditionalAnswers; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.expression.DSL; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class FilterOperatorTest extends PhysicalPlanTestBase { @Mock private PhysicalPlan inputPlan; @Test - public void filterTest() { + public void filter_test() { FilterOperator plan = new FilterOperator(new TestScan(), DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); List result = execute(plan); @@ -41,10 +49,11 @@ public void filterTest() { .tupleValue(ImmutableMap .of("ip", "209.160.24.63", "action", "GET", "response", 404, "referer", "www.amazon.com")))); + assertEquals(1, plan.getTotalHits()); } @Test - public void nullValueShouldBeenIgnored() { + public void null_value_should_been_ignored() { LinkedHashMap value = new LinkedHashMap<>(); value.put("response", LITERAL_NULL); when(inputPlan.hasNext()).thenReturn(true, false); @@ -54,10 +63,11 @@ public void nullValueShouldBeenIgnored() { DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); List result = execute(plan); assertEquals(0, result.size()); + assertEquals(0, plan.getTotalHits()); } @Test - public void missingValueShouldBeenIgnored() { + public void missing_value_should_been_ignored() { LinkedHashMap value = new 
LinkedHashMap<>(); value.put("response", LITERAL_MISSING); when(inputPlan.hasNext()).thenReturn(true, false); @@ -67,5 +77,21 @@ public void missingValueShouldBeenIgnored() { DSL.equal(DSL.ref("response", INTEGER), DSL.literal(404))); List result = execute(plan); assertEquals(0, result.size()); + assertEquals(0, plan.getTotalHits()); + } + + @Test + public void totalHits() { + when(inputPlan.hasNext()).thenReturn(true, true, true, true, true, false); + var answers = Stream.of(200, 240, 300, 403, 404).map(c -> + new ExprTupleValue(new LinkedHashMap<>(Map.of("response", new ExprIntegerValue(c))))) + .collect(Collectors.toList()); + when(inputPlan.next()).thenAnswer(AdditionalAnswers.returnsElementsOf(answers)); + + FilterOperator plan = new FilterOperator(inputPlan, + DSL.less(DSL.ref("response", INTEGER), DSL.literal(400))); + List result = execute(plan); + assertEquals(3, result.size()); + assertEquals(3, plan.getTotalHits()); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PaginateOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/PaginateOperatorTest.java new file mode 100644 index 0000000000..3e0efc3b50 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PaginateOperatorTest.java @@ -0,0 +1,104 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + + +package org.opensearch.sql.planner.physical; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; +import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; +import static org.opensearch.sql.data.type.ExprCoreType.STRING; +import static org.opensearch.sql.planner.physical.PhysicalPlanDSL.project; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.opensearch.sql.data.model.ExprIntegerValue; +import org.opensearch.sql.expression.DSL; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class PaginateOperatorTest extends PhysicalPlanTestBase { + + @Test + public void accept() { + var visitor = new PhysicalPlanNodeVisitor() {}; + assertNull(new PaginateOperator(null, 42).accept(visitor, null)); + } + + @Test + public void hasNext_a_page() { + var plan = mock(PhysicalPlan.class); + when(plan.hasNext()).thenReturn(true); + when(plan.next()).thenReturn(new ExprIntegerValue(42)).thenReturn(null); + var paginate = new PaginateOperator(plan, 1, 1); + assertTrue(paginate.hasNext()); + assertEquals(42, paginate.next().integerValue()); + paginate.next(); + assertFalse(paginate.hasNext()); + assertNull(paginate.next()); + } + + @Test + public void hasNext_no_more_entries() { + var plan = mock(PhysicalPlan.class); + when(plan.hasNext()).thenReturn(false); + var paginate = new PaginateOperator(plan, 1, 1); + 
assertFalse(paginate.hasNext()); + } + + @Test + public void getChild() { + var plan = mock(PhysicalPlan.class); + var paginate = new PaginateOperator(plan, 1); + assertSame(plan, paginate.getChild().get(0)); + } + + @Test + public void open() { + var plan = mock(PhysicalPlan.class); + doNothing().when(plan).open(); + new PaginateOperator(plan, 1).open(); + verify(plan, times(1)).open(); + } + + @Test + public void schema() { + PhysicalPlan project = project(null, + DSL.named("response", DSL.ref("response", INTEGER)), + DSL.named("action", DSL.ref("action", STRING), "act")); + assertEquals(project.schema(), new PaginateOperator(project, 42).schema()); + } + + @Test + public void schema_assert() { + var plan = mock(PhysicalPlan.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)); + assertThrows(Throwable.class, () -> new PaginateOperator(plan, 42).schema()); + } + + @Test + public void toCursor() { + var plan = mock(PhysicalPlan.class); + when(plan.toCursor()).thenReturn("Great plan, Walter, reliable as a swiss watch!", "", null); + var po = new PaginateOperator(plan, 2); + assertAll( + () -> assertEquals("(Paginate,1,2,Great plan, Walter, reliable as a swiss watch!)", + po.toCursor()), + () -> assertNull(po.toCursor()), + () -> assertNull(po.toCursor()) + ); + } +} diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java index 735b914d3e..3dfe0b5c0f 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanNodeVisitorTest.java @@ -158,6 +158,14 @@ public void test_visitML() { assertNull(physicalPlanNodeVisitor.visitML(plan, null)); } + @Test + public void test_visitPaginate() { + PhysicalPlanNodeVisitor physicalPlanNodeVisitor = + new PhysicalPlanNodeVisitor() {}; + + assertNull(physicalPlanNodeVisitor.visitPaginate(new PaginateOperator(plan, 42), null)); + } + public static class PhysicalPlanPrinter extends PhysicalPlanNodeVisitor { public String print(PhysicalPlan node) { diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java index 0a93c96bbb..5e70f2b9d0 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/PhysicalPlanTest.java @@ -5,9 +5,19 @@ package org.opensearch.sql.planner.physical; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.util.List; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -16,6 +26,7 @@ import org.opensearch.sql.storage.split.Split; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class PhysicalPlanTest { @Mock Split split; @@ -46,8 +57,40 @@ public List 
getChild() { }; @Test - void addSplitToChildByDefault() { + void add_split_to_child_by_default() { testPlan.add(split); verify(child).add(split); } + + @Test + void get_total_hits_from_child() { + var plan = mock(PhysicalPlan.class); + when(child.getTotalHits()).thenReturn(42L); + when(plan.getChild()).thenReturn(List.of(child)); + when(plan.getTotalHits()).then(CALLS_REAL_METHODS); + assertEquals(42, plan.getTotalHits()); + verify(child).getTotalHits(); + } + + @Test + void get_total_hits_uses_default_value() { + var plan = mock(PhysicalPlan.class); + when(plan.getTotalHits()).then(CALLS_REAL_METHODS); + assertEquals(0, plan.getTotalHits()); + } + + @Test + void toCursor() { + var plan = mock(PhysicalPlan.class); + when(plan.toCursor()).then(CALLS_REAL_METHODS); + assertTrue(assertThrows(IllegalStateException.class, plan::toCursor) + .getMessage().contains("is not compatible with cursor feature")); + } + + @Test + void createSection() { + var plan = mock(PhysicalPlan.class); + when(plan.createSection(anyString(), any())).then(CALLS_REAL_METHODS); + assertEquals("(plan,one,two)", plan.createSection("plan", "one", "two")); + } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java index 24be5eb2b8..6042eba6dc 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/ProjectOperatorTest.java @@ -11,6 +11,9 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.iterableWithSize; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_MISSING; import static org.opensearch.sql.data.model.ExprValueUtils.stringValue; @@ -30,11 +33,12 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.expression.DSL; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; @ExtendWith(MockitoExtension.class) class ProjectOperatorTest extends PhysicalPlanTestBase { - @Mock + @Mock(serializable = true) private PhysicalPlan inputPlan; @Test @@ -206,4 +210,20 @@ public void project_parse_missing_will_fallback() { ExprValueUtils.tupleValue(ImmutableMap.of("action", "GET", "response", "200")), ExprValueUtils.tupleValue(ImmutableMap.of("action", "POST"))))); } + + @Test + public void toCursor() { + when(inputPlan.toCursor()).thenReturn("inputPlan", "", null); + var project = DSL.named("response", DSL.ref("response", INTEGER)); + var npe = DSL.named("action", DSL.ref("action", STRING)); + var po = project(inputPlan, List.of(project), List.of(npe)); + var serializer = new DefaultExpressionSerializer(); + var expected = String.format("(Project,(namedParseExpressions,%s),(projectList,%s),%s)", + serializer.serialize(npe), serializer.serialize(project), "inputPlan"); + assertAll( + () -> assertEquals(expected, po.toCursor()), + () -> assertNull(po.toCursor()), + () -> assertNull(po.toCursor()) + ); + } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java index bf046bf0a6..ec950e6016 
100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/RemoveOperatorTest.java @@ -113,12 +113,11 @@ public void remove_nothing_with_none_tuple_value() { @Test public void invalid_to_retrieve_schema_from_remove() { - PhysicalPlan plan = remove(inputPlan, DSL.ref("response", STRING), DSL.ref("referer", STRING)); + PhysicalPlan plan = remove(inputPlan); IllegalStateException exception = assertThrows(IllegalStateException.class, () -> plan.schema()); assertEquals( - "[BUG] schema can been only applied to ProjectOperator, " - + "instead of RemoveOperator(input=inputPlan, removeList=[response, referer])", + "[BUG] schema can been only applied to ProjectOperator, instead of RemoveOperator", exception.getMessage()); } } diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java index 9acab03d2b..bf6d28a23c 100644 --- a/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/ValuesOperatorTest.java @@ -9,6 +9,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.opensearch.sql.data.model.ExprValueUtils.collectionValue; import static org.opensearch.sql.expression.DSL.literal; @@ -44,6 +45,7 @@ public void iterateSingleRow() { results, contains(collectionValue(Arrays.asList(1, "abc"))) ); + assertThat(values.getTotalHits(), equalTo(1L)); } } diff --git a/core/src/test/java/org/opensearch/sql/storage/StorageEngineTest.java b/core/src/test/java/org/opensearch/sql/storage/StorageEngineTest.java index 0e969c6dac..9c96459d06 100644 --- a/core/src/test/java/org/opensearch/sql/storage/StorageEngineTest.java +++ b/core/src/test/java/org/opensearch/sql/storage/StorageEngineTest.java @@ -13,11 +13,16 @@ public class StorageEngineTest { - @Test void testFunctionsMethod() { StorageEngine k = (dataSourceSchemaName, tableName) -> null; Assertions.assertEquals(Collections.emptyList(), k.getFunctions()); } + @Test + void getTableScan() { + StorageEngine k = (dataSourceSchemaName, tableName) -> null; + Assertions.assertThrows(UnsupportedOperationException.class, + () -> k.getTableScan("indexName", "scrollId")); + } } diff --git a/core/src/test/java/org/opensearch/sql/storage/TableTest.java b/core/src/test/java/org/opensearch/sql/storage/TableTest.java new file mode 100644 index 0000000000..2a2b555014 --- /dev/null +++ b/core/src/test/java/org/opensearch/sql/storage/TableTest.java @@ -0,0 +1,25 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.storage; + +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.withSettings; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.mockito.invocation.InvocationOnMock; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class TableTest { + + @Test + public void createPagedScanBuilder_throws() { + var table = mock(Table.class, withSettings().defaultAnswer(InvocationOnMock::callRealMethod)); + 
assertThrows(Throwable.class, () -> table.createPagedScanBuilder(0)); + } +} diff --git a/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java b/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java index e4f9a185a3..3849d686a6 100644 --- a/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java +++ b/core/src/testFixtures/java/org/opensearch/sql/executor/DefaultExecutionEngine.java @@ -9,6 +9,7 @@ import java.util.List; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.planner.physical.PhysicalPlan; /** @@ -32,7 +33,8 @@ public void execute( while (plan.hasNext()) { result.add(plan.next()); } - QueryResponse response = new QueryResponse(new Schema(new ArrayList<>()), new ArrayList<>()); + QueryResponse response = new QueryResponse(new Schema(new ArrayList<>()), new ArrayList<>(), + 0, Cursor.None); listener.onResponse(response); } catch (Exception e) { listener.onFailure(e); diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 5a707a17b0..0f1ee8cb1c 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -120,6 +120,11 @@ compileTestJava { testClusters.all { testDistribution = 'archive' + + // To debug, run: ./gradlew opensearch-sql:run -DdebugJVM (--debug-jvm does not work with the keystore). + if (System.getProperty("debugJVM") != null) { + jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005' + } } testClusters.integTest { @@ -178,10 +183,16 @@ integTest { // Tell the test JVM if the cluster JVM is running under a debugger so that tests can use longer timeouts for // requests. The 'doFirst' delays reading the debug setting on the cluster till execution time.
- doFirst { systemProperty 'cluster.debug', getDebug() } + doFirst { + if (System.getProperty("debug-jvm") != null) { + setDebug(true); + } + systemProperty 'cluster.debug', getDebug() + } + if (System.getProperty("test.debug") != null) { - jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005' + jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5006' } if (System.getProperty("tests.rest.bwcsuite") == null) { diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java index 113a19885a..5b9a583d04 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/CursorIT.java @@ -123,11 +123,16 @@ public void validNumberOfPages() throws IOException { String selectQuery = StringUtils.format("SELECT firstname, state FROM %s", TEST_INDEX_ACCOUNT); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 50, JDBC)); String cursor = response.getString(CURSOR); + verifyIsV1Cursor(cursor); + int pageCount = 1; while (!cursor.isEmpty()) { //this condition also checks that there is no cursor on last page response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); + if (!cursor.isEmpty()) { + verifyIsV1Cursor(cursor); + } pageCount++; } @@ -136,12 +141,16 @@ public void validNumberOfPages() throws IOException { // using random value here, with fetch size of 28 we should get 36 pages (ceil of 1000/28) response = new JSONObject(executeFetchQuery(selectQuery, 28, JDBC)); cursor = response.getString(CURSOR); + verifyIsV1Cursor(cursor); System.out.println(response); pageCount = 1; while (!cursor.isEmpty()) { response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); + if (!cursor.isEmpty()) { + verifyIsV1Cursor(cursor); + } pageCount++; } assertThat(pageCount, equalTo(36)); @@ -223,6 +232,7 @@ public void testCursorWithPreparedStatement() throws IOException { "}", TestsConstants.TEST_INDEX_ACCOUNT)); assertTrue(response.has(CURSOR)); + verifyIsV1Cursor(response.getString(CURSOR)); } @Test @@ -244,11 +254,13 @@ public void testRegressionOnDateFormatChange() throws IOException { StringUtils.format("SELECT login_time FROM %s LIMIT 500", TEST_INDEX_DATE_TIME); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 1, JDBC)); String cursor = response.getString(CURSOR); + verifyIsV1Cursor(cursor); actualDateList.add(response.getJSONArray(DATAROWS).getJSONArray(0).getString(0)); while (!cursor.isEmpty()) { response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); + verifyIsV1Cursor(cursor); actualDateList.add(response.getJSONArray(DATAROWS).getJSONArray(0).getString(0)); } @@ -274,7 +286,6 @@ public void defaultBehaviorWhenCursorSettingIsDisabled() throws IOException { query = StringUtils.format("SELECT firstname, email, state FROM %s", TEST_INDEX_ACCOUNT); response = new JSONObject(executeFetchQuery(query, 100, JDBC)); assertTrue(response.has(CURSOR)); - wipeAllClusterSettings(); } @@ -305,12 +316,14 @@ public void testDefaultFetchSizeFromClusterSettings() throws IOException { JSONObject response = new JSONObject(executeFetchLessQuery(query, JDBC)); JSONArray datawRows = response.optJSONArray(DATAROWS); assertThat(datawRows.length(), equalTo(1000)); + verifyIsV1Cursor(response.getString(CURSOR)); updateClusterSettings(new ClusterSetting(TRANSIENT, "opensearch.sql.cursor.fetch_size", "786")); response = new 
JSONObject(executeFetchLessQuery(query, JDBC)); datawRows = response.optJSONArray(DATAROWS); assertThat(datawRows.length(), equalTo(786)); assertTrue(response.has(CURSOR)); + verifyIsV1Cursor(response.getString(CURSOR)); wipeAllClusterSettings(); } @@ -323,11 +336,12 @@ public void testCursorCloseAPI() throws IOException { "SELECT firstname, state FROM %s WHERE balance > 100 and age < 40", TEST_INDEX_ACCOUNT); JSONObject result = new JSONObject(executeFetchQuery(selectQuery, 50, JDBC)); String cursor = result.getString(CURSOR); - + verifyIsV1Cursor(cursor); // Retrieving next 10 pages out of remaining 19 pages for (int i = 0; i < 10; i++) { result = executeCursorQuery(cursor); cursor = result.optString(CURSOR); + verifyIsV1Cursor(cursor); } //Closing the cursor JSONObject closeResp = executeCursorCloseQuery(cursor); @@ -386,12 +400,14 @@ public void respectLimitPassedInSelectClause() throws IOException { StringUtils.format("SELECT age, balance FROM %s LIMIT %s", TEST_INDEX_ACCOUNT, limit); JSONObject response = new JSONObject(executeFetchQuery(selectQuery, 50, JDBC)); String cursor = response.getString(CURSOR); + verifyIsV1Cursor(cursor); int actualDataRowCount = response.getJSONArray(DATAROWS).length(); int pageCount = 1; while (!cursor.isEmpty()) { response = executeCursorQuery(cursor); cursor = response.optString(CURSOR); + verifyIsV1Cursor(cursor); actualDataRowCount += response.getJSONArray(DATAROWS).length(); pageCount++; } @@ -432,10 +448,12 @@ public void verifyWithAndWithoutPaginationResponse(String sqlQuery, String curso response.optJSONArray(DATAROWS).forEach(dataRows::put); String cursor = response.getString(CURSOR); + verifyIsV1Cursor(cursor); while (!cursor.isEmpty()) { response = executeCursorQuery(cursor); response.optJSONArray(DATAROWS).forEach(dataRows::put); cursor = response.optString(CURSOR); + verifyIsV1Cursor(cursor); } verifySchema(withoutCursorResponse.optJSONArray(SCHEMA), @@ -465,6 +483,13 @@ public String executeFetchAsStringQuery(String query, String fetchSize, String r return responseString; } + private void verifyIsV1Cursor(String cursor) { + if (cursor.isEmpty()) { + return; + } + assertTrue("The cursor '" + cursor + "' is not from v1 engine.", cursor.startsWith("d:")); + } + private String makeRequest(String query, String fetch_size) { return String.format("{" + " \"fetch_size\": \"%s\"," + diff --git a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java index 360497300e..dbd37835a7 100644 --- a/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java +++ b/integ-test/src/test/java/org/opensearch/sql/legacy/SQLIntegTestCase.java @@ -257,6 +257,17 @@ protected String executeFetchQuery(String query, int fetchSize, String requestTy return responseString; } + protected JSONObject executeQueryTemplate(String queryTemplate, String index, int fetchSize) + throws IOException { + var query = String.format(queryTemplate, index); + return new JSONObject(executeFetchQuery(query, fetchSize, "jdbc")); + } + + protected JSONObject executeQueryTemplate(String queryTemplate, String index) throws IOException { + var query = String.format(queryTemplate, index); + return executeQueryTemplate(queryTemplate, index, 4); + } + protected String executeFetchLessQuery(String query, String requestType) throws IOException { String endpoint = "/_plugins/_sql?format=" + requestType; diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java 
b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java index 0c900ea234..ee568b7dbd 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java @@ -29,40 +29,41 @@ import org.opensearch.common.inject.Singleton; import org.opensearch.sql.analysis.Analyzer; import org.opensearch.sql.analysis.ExpressionAnalyzer; -import org.opensearch.sql.common.response.ResponseListener; -import org.opensearch.sql.common.setting.Settings; -import org.opensearch.sql.datasource.DataSourceMetadataStorage; -import org.opensearch.sql.datasource.DataSourceService; -import org.opensearch.sql.datasource.DataSourceServiceImpl; -import org.opensearch.sql.datasource.DataSourceUserAuthorizationHelper; -import org.opensearch.sql.datasource.model.DataSourceMetadata; -import org.opensearch.sql.executor.ExecutionEngine; -import org.opensearch.sql.executor.ExecutionEngine.QueryResponse; import org.opensearch.sql.executor.QueryManager; import org.opensearch.sql.executor.QueryService; import org.opensearch.sql.executor.execution.QueryPlanFactory; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; import org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.monitor.AlwaysHealthyMonitor; import org.opensearch.sql.monitor.ResourceMonitor; -import org.opensearch.sql.opensearch.client.OpenSearchClient; -import org.opensearch.sql.opensearch.client.OpenSearchRestClient; import org.opensearch.sql.opensearch.executor.OpenSearchExecutionEngine; import org.opensearch.sql.opensearch.executor.protector.ExecutionProtector; import org.opensearch.sql.opensearch.executor.protector.OpenSearchExecutionProtector; -import org.opensearch.sql.opensearch.security.SecurityAccess; -import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.opensearch.storage.OpenSearchStorageEngine; import org.opensearch.sql.planner.Planner; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; import org.opensearch.sql.ppl.antlr.PPLSyntaxParser; -import org.opensearch.sql.ppl.domain.PPLQueryRequest; -import org.opensearch.sql.protocol.response.QueryResult; -import org.opensearch.sql.protocol.response.format.SimpleJsonResponseFormatter; import org.opensearch.sql.sql.SQLService; import org.opensearch.sql.sql.antlr.SQLSyntaxParser; -import org.opensearch.sql.storage.DataSourceFactory; import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.util.ExecuteOnCallerThreadQueryManager; +import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.datasource.DataSourceMetadataStorage; +import org.opensearch.sql.datasource.DataSourceService; +import org.opensearch.sql.datasource.DataSourceServiceImpl; +import org.opensearch.sql.datasource.DataSourceUserAuthorizationHelper; +import org.opensearch.sql.datasource.model.DataSourceMetadata; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.ExecutionEngine.QueryResponse; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.client.OpenSearchRestClient; +import org.opensearch.sql.opensearch.security.SecurityAccess; +import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; +import org.opensearch.sql.ppl.domain.PPLQueryRequest; +import org.opensearch.sql.protocol.response.QueryResult; +import 
org.opensearch.sql.protocol.response.format.SimpleJsonResponseFormatter; +import org.opensearch.sql.storage.DataSourceFactory; /** * Run PPL with query engine outside OpenSearch cluster. This IT doesn't require our plugin @@ -71,13 +72,11 @@ */ public class StandaloneIT extends PPLIntegTestCase { - private RestHighLevelClient restClient; - private PPLService pplService; @Override public void init() { - restClient = new InternalRestHighLevelClient(client()); + RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); OpenSearchClient client = new OpenSearchRestClient(restClient); DataSourceService dataSourceService = new DataSourceServiceImpl( new ImmutableSet.Builder() @@ -198,8 +197,9 @@ public StorageEngine storageEngine(OpenSearchClient client) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector) { - return new OpenSearchExecutionEngine(client, protector); + public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, + PaginatedPlanCache paginatedPlanCache) { + return new OpenSearchExecutionEngine(client, protector, paginatedPlanCache); } @Provides @@ -229,17 +229,24 @@ public SQLService sqlService(QueryManager queryManager, QueryPlanFactory queryPl } @Provides - public QueryPlanFactory queryPlanFactory(ExecutionEngine executionEngine) { + public PaginatedPlanCache paginatedPlanCache(StorageEngine storageEngine) { + return new PaginatedPlanCache(storageEngine); + } + + @Provides + public QueryPlanFactory queryPlanFactory(ExecutionEngine executionEngine, + PaginatedPlanCache paginatedPlanCache) { Analyzer analyzer = new Analyzer( new ExpressionAnalyzer(functionRepository), dataSourceService, functionRepository); Planner planner = new Planner(LogicalPlanOptimizer.create()); - return new QueryPlanFactory(new QueryService(analyzer, executionEngine, planner)); + Planner paginationPlanner = new Planner(LogicalPlanOptimizer.paginationCreate()); + QueryService queryService = new QueryService(analyzer, executionEngine, planner, paginationPlanner); + return new QueryPlanFactory(queryService, paginatedPlanCache); } } - - private DataSourceMetadataStorage getDataSourceMetadataStorage() { + public static DataSourceMetadataStorage getDataSourceMetadataStorage() { return new DataSourceMetadataStorage() { @Override public List getDataSourceMetadata() { @@ -268,7 +275,7 @@ public void deleteDataSourceMetadata(String datasourceName) { }; } - private DataSourceUserAuthorizationHelper getDataSourceUserRoleHelper() { + public static DataSourceUserAuthorizationHelper getDataSourceUserRoleHelper() { return new DataSourceUserAuthorizationHelper() { @Override public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { @@ -276,5 +283,4 @@ public void authorizeDataSource(DataSourceMetadata dataSourceMetadata) { } }; } - } diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java index 809e2dc7c5..0ab6d5c70f 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/HighlightFunctionIT.java @@ -64,7 +64,7 @@ public void highlight_multiple_optional_arguments_test() { schema("highlight(Body, pre_tags='', " + "post_tags='')", null, "nested")); - assertEquals(1, response.getInt("total")); + assertEquals(1, response.getInt("size")); verifyDataRows(response, rows(new JSONArray(List.of("What are the differences between 
an IPA" + " and its variants?")), diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java new file mode 100644 index 0000000000..d8213b1fe4 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationBlackboxIT.java @@ -0,0 +1,117 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + + +package org.opensearch.sql.sql; + +import static org.opensearch.sql.legacy.TestUtils.getResponseBody; +import static org.opensearch.sql.legacy.TestUtils.isIndexExist; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ONLINE; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import lombok.SneakyThrows; +import org.json.JSONArray; +import org.json.JSONObject; +import org.junit.Test; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.opensearch.client.Request; +import org.opensearch.sql.legacy.SQLIntegTestCase; + +// This class has only one test case, because it is parametrized and takes significant time +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class PaginationBlackboxIT extends SQLIntegTestCase { + + private final String index; + private final Integer pageSize; + + public PaginationBlackboxIT(@Name("index") String index, + @Name("pageSize") Integer pageSize) { + this.index = index; + this.pageSize = pageSize; + } + + @ParametersFactory(argumentFormatting = "index = %1$s, page_size = %2$d") + public static Iterable indicesAndPageSizes() { + var indices = new PaginationBlackboxHelper().getIndices(); + var pageSizes = List.of(5, 10, 100, 1000); + var testData = new ArrayList(); + for (var index : indices) { + for (var pageSize : pageSizes) { + testData.add(new Object[] { index, pageSize }); + } + } + return testData; + } + + @Test + @SneakyThrows + public void test_pagination_blackbox() { + var response = executeJdbcRequest(String.format("select * from %s", index)); + var indexSize = response.getInt("total"); + var rows = response.getJSONArray("datarows"); + var schema = response.getJSONArray("schema"); + var testReportPrefix = String.format("index: %s, page size: %d || ", index, pageSize); + var rowsPaged = new JSONArray(); + var rowsReturned = 0; + response = new JSONObject(executeFetchQuery( + String.format("select * from %s", index), pageSize, "jdbc")); + var responseCounter = 1; + this.logger.info(testReportPrefix + "first response"); + while (response.has("cursor")) { + assertEquals(indexSize, response.getInt("total")); + assertTrue("Paged response schema doesn't match the non-paged schema", + schema.similar(response.getJSONArray("schema"))); + var cursor = response.getString("cursor"); + assertTrue(testReportPrefix + "Cursor returned from the legacy engine", + cursor.startsWith("n:")); + rowsReturned += response.getInt("size"); + var datarows = response.getJSONArray("datarows"); + for (int i = 0; i < datarows.length(); i++) { + rowsPaged.put(datarows.get(i)); + } + response = executeCursorQuery(cursor); + this.logger.info(testReportPrefix + + String.format("subsequent response %d/%d", responseCounter++, (indexSize / pageSize) + 1)); + } + assertTrue("Paged response schema doesn't match the non-paged schema", 
schema.similar(response.getJSONArray("schema"))); + assertEquals(0, response.getInt("total")); + + assertEquals(testReportPrefix + "Last page is not empty", + 0, response.getInt("size")); + assertEquals(testReportPrefix + "Last page is not empty", + 0, response.getJSONArray("datarows").length()); + assertEquals(testReportPrefix + "Paged responses returned a different row count than the non-paged response", + indexSize, rowsReturned); + assertTrue(testReportPrefix + "Accumulated paged rows differ from the non-paged rows", + rows.similar(rowsPaged)); + } + + // A dummy class, created because `client()` isn't accessible from a static context, + // but is needed before an instance of `PaginationBlackboxIT` is created. + private static class PaginationBlackboxHelper extends SQLIntegTestCase { + + @SneakyThrows + private List getIndices() { + initClient(); + loadIndex(Index.ACCOUNT); + loadIndex(Index.BEER); + loadIndex(Index.BANK); + if (!isIndexExist(client(), "empty")) { + executeRequest(new Request("PUT", "/empty")); + } + return Arrays.stream(getResponseBody(client().performRequest(new Request("GET", "_cat/indices?h=i")), true).split("\n")) + // exclude this index, because it is too big (almost 10k docs) and makes the test run too long + .map(String::trim).filter(i -> !i.equals(TEST_INDEX_ONLINE)).collect(Collectors.toList()); + } + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java new file mode 100644 index 0000000000..33d9c5f6a8 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationFallbackIT.java @@ -0,0 +1,131 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.sql; + +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ONLINE; +import static org.opensearch.sql.util.TestUtils.verifyIsV1Cursor; +import static org.opensearch.sql.util.TestUtils.verifyIsV2Cursor; + +import java.io.IOException; import org.json.JSONObject; import org.junit.Test; import org.opensearch.sql.legacy.SQLIntegTestCase; import org.opensearch.sql.util.TestUtils; + +public class PaginationFallbackIT extends SQLIntegTestCase { + @Override + public void init() throws IOException { + loadIndex(Index.PHRASE); + loadIndex(Index.ONLINE); + } + + @Test + public void testWhereClause() throws IOException { + var response = executeQueryTemplate("SELECT * FROM %s WHERE 1 = 1", TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testSelectAll() throws IOException { + var response = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_ONLINE); + verifyIsV2Cursor(response); + } + + @Test + public void testSelectWithOpenSearchFuncInFilter() throws IOException { + var response = executeQueryTemplate( + "SELECT * FROM %s WHERE `11` = match_phrase('96')", TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testSelectWithHighlight() throws IOException { + var response = executeQueryTemplate( + "SELECT highlight(`11`) FROM %s WHERE match_query(`11`, '96')", TEST_INDEX_ONLINE); + // As of 2023-03-08, a WHERE clause sends the query to the legacy engine, and the legacy engine + // does not support highlight as an expression. 
+ assertTrue(response.has("error")); + } + + @Test + public void testSelectWithFullTextSearch() throws IOException { + var response = executeQueryTemplate( + "SELECT * FROM %s WHERE match_phrase(`11`, '96')", TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testSelectFromIndexWildcard() throws IOException { + var response = executeQueryTemplate("SELECT * FROM %s*", TEST_INDEX); + verifyIsV2Cursor(response); + } + + @Test + public void testSelectFromDataSource() throws IOException { + var response = executeQueryTemplate("SELECT * FROM @opensearch.%s", + TEST_INDEX_ONLINE); + verifyIsV2Cursor(response); + } + + @Test + public void testSelectColumnReference() throws IOException { + var response = executeQueryTemplate("SELECT `107` from %s", TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testSubquery() throws IOException { + var response = executeQueryTemplate("SELECT `107` from (SELECT * FROM %s)", + TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testSelectExpression() throws IOException { + var response = executeQueryTemplate("SELECT 1 + 1 - `107` from %s", + TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testGroupBy() throws IOException { + // GROUP BY is not paged by either engine. + var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107`", + TEST_INDEX_ONLINE); + TestUtils.verifyNoCursor(response); + } + + @Test + public void testGroupByHaving() throws IOException { + // GROUP BY is not paged by either engine. + var response = executeQueryTemplate("SELECT * FROM %s GROUP BY `107` HAVING `107` > 400", + TEST_INDEX_ONLINE); + TestUtils.verifyNoCursor(response); + } + + @Test + public void testLimit() throws IOException { + var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8", TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testLimitOffset() throws IOException { + var response = executeQueryTemplate("SELECT * FROM %s LIMIT 8 OFFSET 4", + TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + @Test + public void testOrderBy() throws IOException { + var response = executeQueryTemplate("SELECT * FROM %s ORDER By `107`", + TEST_INDEX_ONLINE); + verifyIsV1Cursor(response); + } + + +} diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java new file mode 100644 index 0000000000..a1d353cde8 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationIT.java @@ -0,0 +1,79 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.sql; + +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_CALCS; +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_ONLINE; + +import java.io.IOException; +import org.json.JSONObject; +import org.junit.Ignore; +import org.junit.Test; +import org.opensearch.client.ResponseException; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.util.TestUtils; + +public class PaginationIT extends SQLIntegTestCase { + @Override + public void init() throws IOException { + loadIndex(Index.CALCS); + loadIndex(Index.ONLINE); + } + + @Test + public void testSmallDataSet() throws IOException { + var query = "SELECT * from " + TEST_INDEX_CALCS; + var response = new JSONObject(executeFetchQuery(query, 4, "jdbc")); + 
assertTrue(response.has("cursor")); + assertEquals(4, response.getInt("size")); + TestUtils.verifyIsV2Cursor(response); + } + + @Test + public void testLargeDataSetV1() throws IOException { + var v1query = "SELECT * from " + TEST_INDEX_ONLINE + " WHERE 1 = 1"; + var v1response = new JSONObject(executeFetchQuery(v1query, 4, "jdbc")); + assertEquals(4, v1response.getInt("size")); + TestUtils.verifyIsV1Cursor(v1response); + } + + @Test + public void testLargeDataSetV2() throws IOException { + var query = "SELECT * from " + TEST_INDEX_ONLINE; + var response = new JSONObject(executeFetchQuery(query, 4, "jdbc")); + assertEquals(4, response.getInt("size")); + TestUtils.verifyIsV2Cursor(response); + } + + @Ignore("Scroll may not expire after timeout") + // The scroll keep-alive parameter guarantees that the scroll context is kept for at least that time, + // but doesn't define how soon it expires after the timeout. + // With keep-alive = 1s, the scroll may still be kept for 30 sec or more, so exact expiration can't be tested. + // The test is disabled to avoid waiting for a minute and delaying all CI. + public void testCursorTimeout() throws IOException, InterruptedException { + updateClusterSettings( + new ClusterSetting(PERSISTENT, Settings.Key.SQL_CURSOR_KEEP_ALIVE.getKeyValue(), "1s")); + + var query = "SELECT * from " + TEST_INDEX_CALCS; + var response = new JSONObject(executeFetchQuery(query, 4, "jdbc")); + assertTrue(response.has("cursor")); + var cursor = response.getString("cursor"); + Thread.sleep(2222L); // > 1s + + ResponseException exception = + expectThrows(ResponseException.class, () -> executeCursorQuery(cursor)); + response = new JSONObject(TestUtils.getResponseBody(exception.getResponse())); + assertEquals(response.getJSONObject("error").getString("reason"), + "Error occurred in OpenSearch engine: all shards failed"); + assertTrue(response.getJSONObject("error").getString("details") + .contains("SearchContextMissingException[No search context found for id")); + assertEquals(response.getJSONObject("error").getString("type"), + "SearchPhaseExecutionException"); + + wipeAllClusterSettings(); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java new file mode 100644 index 0000000000..724451ef65 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/sql/PaginationWindowIT.java @@ -0,0 +1,98 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.sql; + +import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_PHRASE; + +import java.io.IOException; +import org.json.JSONObject; +import org.junit.After; +import org.junit.Test; +import org.opensearch.client.ResponseException; +import org.opensearch.sql.legacy.SQLIntegTestCase; + +public class PaginationWindowIT extends SQLIntegTestCase { + @Override + public void init() throws IOException { + loadIndex(Index.PHRASE); + } + + @After + public void resetParams() throws IOException { + resetMaxResultWindow(TEST_INDEX_PHRASE); + resetQuerySizeLimit(); + } + + @Test + public void testFetchSizeLessThanMaxResultWindow() throws IOException { + setMaxResultWindow(TEST_INDEX_PHRASE, 6); + JSONObject response = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 5); + + String cursor = ""; + int numRows = 0; + do { + // Process response + cursor = response.getString("cursor"); + numRows += response.getJSONArray("datarows").length(); + response = executeCursorQuery(cursor); + } while 
(response.has("cursor")); + + var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); + assertEquals(countRows, numRows); + } + + @Test + public void testQuerySizeLimitDoesNotEffectTotalRowsReturned() throws IOException { + int querySizeLimit = 4; + setQuerySizeLimit(querySizeLimit); + JSONObject response = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 5); + assertTrue(response.getInt("size") > querySizeLimit); + + String cursor = ""; + int numRows = 0; + do { + // Process response + cursor = response.getString("cursor"); + numRows += response.getJSONArray("datarows").length(); + response = executeCursorQuery(cursor); + } while (response.has("cursor")); + + var countRows = executeJdbcRequest("SELECT COUNT(*) FROM " + TEST_INDEX_PHRASE) + .getJSONArray("datarows") + .getJSONArray(0) + .get(0); + assertEquals(countRows, numRows); + assertTrue(numRows > querySizeLimit); + } + + @Test + public void testQuerySizeLimitDoesNotEffectPageSize() throws IOException { + setQuerySizeLimit(3); + setMaxResultWindow(TEST_INDEX_PHRASE, 4); + var response + = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 4); + assertEquals(4, response.getInt("size")); + + var response2 + = executeQueryTemplate("SELECT * FROM %s", TEST_INDEX_PHRASE, 2); + assertEquals(2, response2.getInt("size")); + } + + @Test + public void testFetchSizeLargerThanResultWindowFails() throws IOException { + final int window = 2; + setMaxResultWindow(TEST_INDEX_PHRASE, 2); + assertThrows(ResponseException.class, + () -> executeQueryTemplate("SELECT * FROM %s", + TEST_INDEX_PHRASE, window + 1)); + resetMaxResultWindow(TEST_INDEX_PHRASE); + } + + +} diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java new file mode 100644 index 0000000000..16eb9d1ff8 --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/sql/StandalonePaginationIT.java @@ -0,0 +1,171 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.sql; + +import static org.opensearch.sql.datasource.model.DataSourceMetadata.defaultOpenSearchDataSourceMetadata; +import static org.opensearch.sql.ppl.StandaloneIT.getDataSourceMetadataStorage; +import static org.opensearch.sql.ppl.StandaloneIT.getDataSourceUserRoleHelper; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import lombok.Getter; +import lombok.SneakyThrows; +import org.json.JSONObject; +import org.junit.Test; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.opensearch.client.Request; +import org.opensearch.client.ResponseException; +import org.opensearch.client.RestHighLevelClient; +import org.opensearch.common.inject.Injector; +import org.opensearch.common.inject.ModulesBuilder; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.sql.common.response.ResponseListener; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.data.type.ExprCoreType; +import org.opensearch.sql.datasource.DataSourceService; +import org.opensearch.sql.datasource.DataSourceServiceImpl; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; +import 
org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.expression.DSL; +import org.opensearch.sql.legacy.SQLIntegTestCase; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.client.OpenSearchRestClient; +import org.opensearch.sql.executor.pagination.Cursor; +import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; +import org.opensearch.sql.opensearch.storage.OpenSearchIndex; +import org.opensearch.sql.planner.PlanContext; +import org.opensearch.sql.planner.logical.LogicalPaginate; +import org.opensearch.sql.planner.logical.LogicalPlan; +import org.opensearch.sql.planner.logical.LogicalProject; +import org.opensearch.sql.planner.logical.LogicalRelation; +import org.opensearch.sql.planner.physical.PhysicalPlan; +import org.opensearch.sql.storage.DataSourceFactory; +import org.opensearch.sql.util.InternalRestHighLevelClient; +import org.opensearch.sql.util.StandaloneModule; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class StandalonePaginationIT extends SQLIntegTestCase { + + private QueryService queryService; + + private PaginatedPlanCache paginatedPlanCache; + + private OpenSearchClient client; + + @Override + @SneakyThrows + public void init() { + RestHighLevelClient restClient = new InternalRestHighLevelClient(client()); + client = new OpenSearchRestClient(restClient); + DataSourceService dataSourceService = new DataSourceServiceImpl( + new ImmutableSet.Builder() + .add(new OpenSearchDataSourceFactory(client, defaultSettings())) + .build(), + getDataSourceMetadataStorage(), + getDataSourceUserRoleHelper() + ); + dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); + + ModulesBuilder modules = new ModulesBuilder(); + modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); + Injector injector = modules.createInjector(); + + queryService = injector.getInstance(QueryService.class); + paginatedPlanCache = injector.getInstance(PaginatedPlanCache.class); + } + + @Test + public void test_pagination_whitebox() throws IOException { + class TestResponder + implements ResponseListener { + @Getter + Cursor cursor = Cursor.None; + @Override + public void onResponse(ExecutionEngine.QueryResponse response) { + cursor = response.getCursor(); + } + + @Override + public void onFailure(Exception e) { + e.printStackTrace(); + fail(e.getMessage()); + } + }; + + // arrange + { + Request request1 = new Request("PUT", "/test/_doc/1?refresh=true"); + request1.setJsonEntity("{\"name\": \"hello\", \"age\": 20}"); + client().performRequest(request1); + Request request2 = new Request("PUT", "/test/_doc/2?refresh=true"); + request2.setJsonEntity("{\"name\": \"world\", \"age\": 30}"); + client().performRequest(request2); + } + + // act 1, asserts in firstResponder + var t = new OpenSearchIndex(client, defaultSettings(), "test"); + LogicalPlan p = new LogicalPaginate(1, List.of( + new LogicalProject( + new LogicalRelation("test", t), List.of( + DSL.named("name", DSL.ref("name", ExprCoreType.STRING)), + DSL.named("age", DSL.ref("age", ExprCoreType.LONG))), + List.of() + ))); + var firstResponder = new TestResponder(); + queryService.executePlan(p, PlanContext.emptyPlanContext(), firstResponder); + + // act 2, asserts in secondResponder + + PhysicalPlan plan = paginatedPlanCache.convertToPlan(firstResponder.getCursor().toString()); + var secondResponder = new TestResponder(); + queryService.executePlan(plan, secondResponder); 
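+ // The cursor captured by firstResponder serializes the state of the paged request;
+ // paginatedPlanCache.convertToPlan() rebuilds an executable physical plan from that string,
+ // so the second page is served without re-parsing or re-analyzing the original query.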
+ + // act 3: confirm that there's no cursor. + } + + @Test + @SneakyThrows + public void test_explain_not_supported() { + var request = new Request("POST", "_plugins/_sql/_explain"); + // Request should be rejected before index names are resolved + request.setJsonEntity("{ \"query\": \"select * from something\", \"fetch_size\": 10 }"); + var exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); + var response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals("`explain` feature for paginated requests is not implemented yet.", + response.getJSONObject("error").getString("details")); + + // Request should be rejected before cursor parsed + request.setJsonEntity("{ \"cursor\" : \"n:0000\" }"); + exception = assertThrows(ResponseException.class, () -> client().performRequest(request)); + response = new JSONObject(new String(exception.getResponse().getEntity().getContent().readAllBytes())); + assertEquals("Explain of a paged query continuation is not supported. Use `explain` for the initial query request.", + response.getJSONObject("error").getString("details")); + } + + private Settings defaultSettings() { + return new Settings() { + private final Map defaultSettings = new ImmutableMap.Builder() + .put(Key.QUERY_SIZE_LIMIT, 200) + .put(Key.SQL_CURSOR_KEEP_ALIVE, TimeValue.timeValueMinutes(1)) + .build(); + + @Override + public T getSettingValue(Key key) { + return (T) defaultSettings.get(key); + } + + @Override + public List getSettings() { + return (List) defaultSettings; + } + }; + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java new file mode 100644 index 0000000000..57726089ae --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/util/InternalRestHighLevelClient.java @@ -0,0 +1,19 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.util; + +import java.util.Collections; +import org.opensearch.client.RestClient; +import org.opensearch.client.RestHighLevelClient; + +/** + * Internal RestHighLevelClient only for testing purpose. 
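+ * Exposes the protected {@link RestHighLevelClient} constructor, so tests can wrap an existing low-level {@link RestClient}.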
+ */ +public class InternalRestHighLevelClient extends RestHighLevelClient { + public InternalRestHighLevelClient(RestClient restClient) { + super(restClient, RestClient::close, Collections.emptyList()); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java new file mode 100644 index 0000000000..c7515b461f --- /dev/null +++ b/integ-test/src/test/java/org/opensearch/sql/util/StandaloneModule.java @@ -0,0 +1,123 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.util; + +import lombok.RequiredArgsConstructor; +import org.opensearch.client.RestHighLevelClient; +import org.opensearch.common.inject.AbstractModule; +import org.opensearch.common.inject.Provides; +import org.opensearch.common.inject.Singleton; +import org.opensearch.sql.analysis.Analyzer; +import org.opensearch.sql.analysis.ExpressionAnalyzer; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.datasource.DataSourceService; +import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; +import org.opensearch.sql.executor.QueryManager; +import org.opensearch.sql.executor.QueryService; +import org.opensearch.sql.executor.execution.QueryPlanFactory; +import org.opensearch.sql.expression.function.BuiltinFunctionRepository; +import org.opensearch.sql.monitor.AlwaysHealthyMonitor; +import org.opensearch.sql.monitor.ResourceMonitor; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.client.OpenSearchRestClient; +import org.opensearch.sql.opensearch.executor.OpenSearchExecutionEngine; +import org.opensearch.sql.opensearch.executor.protector.ExecutionProtector; +import org.opensearch.sql.opensearch.executor.protector.OpenSearchExecutionProtector; +import org.opensearch.sql.opensearch.storage.OpenSearchStorageEngine; +import org.opensearch.sql.planner.Planner; +import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; +import org.opensearch.sql.ppl.PPLService; +import org.opensearch.sql.ppl.antlr.PPLSyntaxParser; +import org.opensearch.sql.sql.SQLService; +import org.opensearch.sql.sql.antlr.SQLSyntaxParser; +import org.opensearch.sql.storage.StorageEngine; + +/** + * A utility class which registers SQL engine singletons as `OpenSearchPluginModule` does. + * It is needed to get access to those instances in test and validate their behavior. 
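+ * Queries submitted through this module are executed on the caller thread via {@code ExecuteOnCallerThreadQueryManager}.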
+ */ +@RequiredArgsConstructor +public class StandaloneModule extends AbstractModule { + + private final RestHighLevelClient client; + + private final Settings settings; + + private final DataSourceService dataSourceService; + + private final BuiltinFunctionRepository functionRepository = + BuiltinFunctionRepository.getInstance(); + + @Override + protected void configure() { + } + + @Provides + public OpenSearchClient openSearchClient() { + return new OpenSearchRestClient(client); + } + + @Provides + public StorageEngine storageEngine(OpenSearchClient client) { + return new OpenSearchStorageEngine(client, settings); + } + + @Provides + public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, + PaginatedPlanCache paginatedPlanCache) { + return new OpenSearchExecutionEngine(client, protector, paginatedPlanCache); + } + + @Provides + public ResourceMonitor resourceMonitor() { + return new AlwaysHealthyMonitor(); + } + + @Provides + public ExecutionProtector protector(ResourceMonitor resourceMonitor) { + return new OpenSearchExecutionProtector(resourceMonitor); + } + + @Provides + @Singleton + public QueryManager queryManager() { + return new ExecuteOnCallerThreadQueryManager(); + } + + @Provides + public PPLService pplService(QueryManager queryManager, QueryPlanFactory queryPlanFactory) { + return new PPLService(new PPLSyntaxParser(), queryManager, queryPlanFactory); + } + + @Provides + public SQLService sqlService(QueryManager queryManager, QueryPlanFactory queryPlanFactory) { + return new SQLService(new SQLSyntaxParser(), queryManager, queryPlanFactory); + } + + @Provides + public PaginatedPlanCache paginatedPlanCache(StorageEngine storageEngine) { + return new PaginatedPlanCache(storageEngine); + } + + @Provides + public QueryPlanFactory queryPlanFactory(ExecutionEngine executionEngine, + PaginatedPlanCache paginatedPlanCache, + QueryService qs) { + + return new QueryPlanFactory(qs, paginatedPlanCache); + } + + @Provides + public QueryService queryService(ExecutionEngine executionEngine) { + Analyzer analyzer = + new Analyzer( + new ExpressionAnalyzer(functionRepository), dataSourceService, functionRepository); + Planner planner = new Planner(LogicalPlanOptimizer.create()); + Planner paginationPlanner = new Planner(LogicalPlanOptimizer.paginationCreate()); + return new QueryService(analyzer, executionEngine, planner, paginationPlanner); + } +} diff --git a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java index bd75ead43b..80ce24ecac 100644 --- a/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java +++ b/integ-test/src/test/java/org/opensearch/sql/util/TestUtils.java @@ -7,6 +7,8 @@ package org.opensearch.sql.util; import static com.google.common.base.Strings.isNullOrEmpty; +import static org.junit.Assert.assertTrue; +import static org.opensearch.sql.executor.pagination.PaginatedPlanCache.CURSOR_PREFIX; import java.io.BufferedReader; import java.io.File; @@ -20,22 +22,21 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.stream.Collectors; import org.json.JSONObject; -import org.junit.Assert; import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.index.IndexRequest; import org.opensearch.client.Client; import org.opensearch.client.Request; 
-import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; import org.opensearch.client.RestClient; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.rest.RestStatus; +import org.opensearch.sql.legacy.cursor.CursorType; public class TestUtils { @@ -839,4 +840,28 @@ public static List> getPermutations(final List items) { return result; } + + public static void verifyIsV1Cursor(JSONObject response) { + var legacyCursorPrefixes = Arrays.stream(CursorType.values()) + .map(c -> c.getId() + ":").collect(Collectors.toList()); + verifyCursor(response, legacyCursorPrefixes, "v1"); + } + + + public static void verifyIsV2Cursor(JSONObject response) { + verifyCursor(response, List.of(CURSOR_PREFIX), "v2"); + } + + private static void verifyCursor(JSONObject response, List validCursorPrefix, String engineName) { + assertTrue("'cursor' property does not exist", response.has("cursor")); + + var cursor = response.getString("cursor"); + assertTrue("'cursor' property is empty", !cursor.isEmpty()); + assertTrue("The cursor '" + cursor + "' is not from " + engineName + " engine.", + validCursorPrefix.stream().anyMatch(cursor::startsWith)); + } + + public static void verifyNoCursor(JSONObject response) { + assertTrue(!response.has("cursor")); + } } diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java index bc97f71b47..cbbc8c7b9c 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSQLQueryAction.java @@ -24,6 +24,7 @@ import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.utils.QueryContext; +import org.opensearch.sql.exception.UnsupportedCursorRequestException; import org.opensearch.sql.executor.ExecutionEngine.ExplainResponse; import org.opensearch.sql.legacy.metrics.MetricName; import org.opensearch.sql.legacy.metrics.Metrics; @@ -119,14 +120,14 @@ private ResponseListener fallBackListener( return new ResponseListener() { @Override public void onResponse(T response) { - LOG.error("[{}] Request is handled by new SQL query engine", + LOG.info("[{}] Request is handled by new SQL query engine", QueryContext.getRequestId()); next.onResponse(response); } @Override public void onFailure(Exception e) { - if (e instanceof SyntaxCheckException) { + if (e instanceof SyntaxCheckException || e instanceof UnsupportedCursorRequestException) { fallBackHandler.accept(channel, e); } else { next.onFailure(e); @@ -172,7 +173,8 @@ private ResponseListener createQueryResponseListener( @Override public void onResponse(QueryResponse response) { sendResponse(channel, OK, - formatter.format(new QueryResult(response.getSchema(), response.getResults()))); + formatter.format(new QueryResult(response.getSchema(), response.getResults(), + response.getCursor(), response.getTotal()))); } @Override diff --git a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java index 88ed42010b..e1c72f0f1e 100644 --- a/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java +++ b/legacy/src/main/java/org/opensearch/sql/legacy/plugin/RestSqlAction.java @@ -42,6 +42,7 @@ import org.opensearch.sql.legacy.antlr.SqlAnalysisConfig; import 
org.opensearch.sql.legacy.antlr.SqlAnalysisException; import org.opensearch.sql.legacy.antlr.semantic.types.Type; +import org.opensearch.sql.legacy.cursor.CursorType; import org.opensearch.sql.legacy.domain.ColumnTypeProvider; import org.opensearch.sql.legacy.domain.QueryActionRequest; import org.opensearch.sql.legacy.esdomain.LocalClusterState; @@ -132,7 +133,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } final SqlRequest sqlRequest = SqlRequestFactory.getSqlRequest(request); - if (sqlRequest.cursor() != null) { + if (isLegacyCursor(sqlRequest)) { if (isExplainRequest(request)) { throw new IllegalArgumentException("Invalid request. Cannot explain cursor"); } else { @@ -148,14 +149,14 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli // Route request to new query engine if it's supported already SQLQueryRequest newSqlRequest = new SQLQueryRequest(sqlRequest.getJsonContent(), - sqlRequest.getSql(), request.path(), request.params()); + sqlRequest.getSql(), request.path(), request.params(), sqlRequest.cursor()); return newSqlQueryHandler.prepareRequest(newSqlRequest, (restChannel, exception) -> { try{ if (newSqlRequest.isExplainRequest()) { LOG.info("Request is falling back to old SQL engine due to: " + exception.getMessage()); } - LOG.debug("[{}] Request {} is not supported and falling back to old SQL engine", + LOG.info("[{}] Request {} is not supported and falling back to old SQL engine", QueryContext.getRequestId(), newSqlRequest); QueryAction queryAction = explainRequest(client, sqlRequest, format); executeSqlRequest(request, queryAction, client, restChannel); @@ -175,6 +176,17 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } } + + /** + * @param sqlRequest client request + * @return true if this cursor was generated by the legacy engine, false otherwise. 
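+ * (legacy cursors are prefixed with a single-character {@link CursorType} id, which cursors produced by the new engine do not use).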
+ */ + private static boolean isLegacyCursor(SqlRequest sqlRequest) { + String cursor = sqlRequest.cursor(); + return cursor != null + && CursorType.getById(cursor.substring(0, 1)) != CursorType.NULL; + } + @Override protected Set responseParams() { Set responseParams = new HashSet<>(super.responseParams()); diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java new file mode 100644 index 0000000000..a11f4c47d7 --- /dev/null +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionCursorFallbackTest.java @@ -0,0 +1,127 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.legacy.plugin; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.opensearch.sql.legacy.plugin.RestSqlAction.QUERY_API_ENDPOINT; + +import java.io.IOException; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; +import org.json.JSONObject; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.Strings; +import org.opensearch.common.inject.Injector; +import org.opensearch.common.inject.ModulesBuilder; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestChannel; +import org.opensearch.rest.RestRequest; +import org.opensearch.sql.common.antlr.SyntaxCheckException; +import org.opensearch.sql.executor.QueryManager; +import org.opensearch.sql.executor.execution.QueryPlanFactory; +import org.opensearch.sql.sql.SQLService; +import org.opensearch.sql.sql.antlr.SQLSyntaxParser; +import org.opensearch.sql.sql.domain.SQLQueryRequest; +import org.opensearch.threadpool.ThreadPool; + +/** + * A test suite that verifies fallback behaviour of cursor queries. 
+ */ +@RunWith(MockitoJUnitRunner.class) +public class RestSQLQueryActionCursorFallbackTest extends BaseRestHandler { + + private NodeClient nodeClient; + + @Mock + private ThreadPool threadPool; + + @Mock + private QueryManager queryManager; + + @Mock + private QueryPlanFactory factory; + + @Mock + private RestChannel restChannel; + + private Injector injector; + + @Before + public void setup() { + nodeClient = new NodeClient(org.opensearch.common.settings.Settings.EMPTY, threadPool); + ModulesBuilder modules = new ModulesBuilder(); + modules.add(b -> { + b.bind(SQLService.class).toInstance(new SQLService(new SQLSyntaxParser(), queryManager, factory)); + }); + injector = modules.createInjector(); + Mockito.lenient().when(threadPool.getThreadContext()) + .thenReturn(new ThreadContext(org.opensearch.common.settings.Settings.EMPTY)); + } + + // Initial page request test cases + + @Test + public void no_fallback_with_column_reference() throws Exception { + String query = "SELECT name FROM test1"; + SQLQueryRequest request = createSqlQueryRequest(query, Optional.empty(), + Optional.of(5)); + + assertFalse(doesQueryFallback(request)); + } + + private static SQLQueryRequest createSqlQueryRequest(String query, Optional cursorId, + Optional fetchSize) throws IOException { + var builder = XContentFactory.jsonBuilder() + .startObject() + .field("query").value(query); + if (cursorId.isPresent()) { + builder.field("cursor").value(cursorId.get()); + } + + if (fetchSize.isPresent()) { + builder.field("fetch_size").value(fetchSize.get()); + } + builder.endObject(); + JSONObject jsonContent = new JSONObject(Strings.toString(builder)); + + return new SQLQueryRequest(jsonContent, query, QUERY_API_ENDPOINT, + Map.of("format", "jdbc"), cursorId.orElse("")); + } + + boolean doesQueryFallback(SQLQueryRequest request) throws Exception { + AtomicBoolean fallback = new AtomicBoolean(false); + RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); + queryAction.prepareRequest(request, (channel, exception) -> { + fallback.set(true); + }, (channel, exception) -> { + }).accept(restChannel); + return fallback.get(); + } + + @Override + public String getName() { + // do nothing, RestChannelConsumer is protected which required to extend BaseRestHandler + return null; + } + + @Override + protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient nodeClient) + { + // do nothing, RestChannelConsumer is protected which required to extend BaseRestHandler + return null; + } +} diff --git a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java index 1bc34edf50..be572f3dfb 100644 --- a/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java +++ b/legacy/src/test/java/org/opensearch/sql/legacy/plugin/RestSQLQueryActionTest.java @@ -74,7 +74,7 @@ public void handleQueryThatCanSupport() throws Exception { new JSONObject("{\"query\": \"SELECT -123\"}"), "SELECT -123", QUERY_API_ENDPOINT, - ""); + "jdbc"); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); queryAction.prepareRequest(request, (channel, exception) -> { @@ -90,7 +90,7 @@ public void handleExplainThatCanSupport() throws Exception { new JSONObject("{\"query\": \"SELECT -123\"}"), "SELECT -123", EXPLAIN_API_ENDPOINT, - ""); + "jdbc"); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); queryAction.prepareRequest(request, (channel, exception) -> { @@ -107,7 
+107,7 @@ public void queryThatNotSupportIsHandledByFallbackHandler() throws Exception { "{\"query\": \"SELECT name FROM test1 JOIN test2 ON test1.name = test2.name\"}"), "SELECT name FROM test1 JOIN test2 ON test1.name = test2.name", QUERY_API_ENDPOINT, - ""); + "jdbc"); AtomicBoolean fallback = new AtomicBoolean(false); RestSQLQueryAction queryAction = new RestSQLQueryAction(injector); @@ -128,7 +128,7 @@ public void queryExecutionFailedIsHandledByExecutionErrorHandler() throws Except "{\"query\": \"SELECT -123\"}"), "SELECT -123", QUERY_API_ENDPOINT, - ""); + "jdbc"); doThrow(new IllegalStateException("execution exception")) .when(queryManager) diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java index 8818c394a1..e4f25dabbd 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClient.java @@ -43,7 +43,7 @@ public class OpenSearchNodeClient implements OpenSearchClient { private final NodeClient client; /** - * Constructor of ElasticsearchNodeClient. + * Constructor of OpenSearchNodeClient. */ public OpenSearchNodeClient(NodeClient client) { this.client = client; @@ -172,7 +172,14 @@ public Map meta() { @Override public void cleanup(OpenSearchRequest request) { - request.clean(scrollId -> client.prepareClearScroll().addScrollId(scrollId).get()); + request.clean(scrollId -> { + try { + client.prepareClearScroll().addScrollId(scrollId).get(); + } catch (Exception e) { + throw new IllegalStateException( + "Failed to clean up resources for search request " + request, e); + } + }); } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java index d9f9dbbe5d..757ea99c1b 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/client/OpenSearchRestClient.java @@ -184,7 +184,6 @@ public void cleanup(OpenSearchRequest request) { "Failed to clean up resources for search request " + request, e); } }); - } @Override diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java index 9a136a3bec..103e15e6cd 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngine.java @@ -15,6 +15,7 @@ import org.opensearch.sql.executor.ExecutionContext; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.Explain; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; import org.opensearch.sql.opensearch.client.OpenSearchClient; import org.opensearch.sql.opensearch.executor.protector.ExecutionProtector; import org.opensearch.sql.planner.physical.PhysicalPlan; @@ -27,6 +28,7 @@ public class OpenSearchExecutionEngine implements ExecutionEngine { private final OpenSearchClient client; private final ExecutionProtector executionProtector; + private final PaginatedPlanCache paginatedPlanCache; @Override public void execute(PhysicalPlan physicalPlan, ResponseListener listener) { @@ -49,7 
+51,8 @@ public void execute(PhysicalPlan physicalPlan, ExecutionContext context, result.add(plan.next()); } - QueryResponse response = new QueryResponse(physicalPlan.schema(), result); + QueryResponse response = new QueryResponse(physicalPlan.schema(), result, + plan.getTotalHits(), paginatedPlanCache.convertToCursor(plan)); listener.onResponse(response); } catch (Exception e) { listener.onFailure(e); diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java index f06ecb8576..4d6925f1aa 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtector.java @@ -16,6 +16,7 @@ import org.opensearch.sql.planner.physical.EvalOperator; import org.opensearch.sql.planner.physical.FilterOperator; import org.opensearch.sql.planner.physical.LimitOperator; +import org.opensearch.sql.planner.physical.PaginateOperator; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.ProjectOperator; import org.opensearch.sql.planner.physical.RareTopNOperator; @@ -63,6 +64,12 @@ public PhysicalPlan visitRename(RenameOperator node, Object context) { return new RenameOperator(visitInput(node.getInput(), context), node.getMapping()); } + @Override + public PhysicalPlan visitPaginate(PaginateOperator node, Object context) { + return new PaginateOperator(visitInput(node.getInput(), context), node.getPageSize(), + node.getPageIndex()); + } + /** * Decorate with {@link ResourceMonitorPlan}. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java index 9c59e4acaf..3d880d82b9 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/executor/protector/ResourceMonitorPlan.java @@ -82,4 +82,14 @@ public ExprValue next() { } return delegate.next(); } + + @Override + public long getTotalHits() { + return delegate.getTotalHits(); + } + + @Override + public String toCursor() { + return delegate.toCursor(); + } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/ContinuePageRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/ContinuePageRequest.java new file mode 100644 index 0000000000..1ad6207682 --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/ContinuePageRequest.java @@ -0,0 +1,76 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +import java.util.function.Consumer; +import java.util.function.Function; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import lombok.ToString; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; +import 
org.opensearch.sql.opensearch.response.OpenSearchResponse; + +/** + * A scroll (cursor) request used to page through search results. This request is not configurable + * and carries no search query; it only pages through the responses to the initial request. + * It serves the second and all subsequent pagination (cursor) requests, + * while the first (initial) request is built by {@link InitialPageRequestBuilder}. + */ +@EqualsAndHashCode +@RequiredArgsConstructor +public class ContinuePageRequest implements OpenSearchRequest { + private final String initialScrollId; + private final TimeValue scrollTimeout; + // ScrollId that OpenSearch returns after search. + private String responseScrollId; + + @EqualsAndHashCode.Exclude + @ToString.Exclude + @Getter + private final OpenSearchExprValueFactory exprValueFactory; + + @EqualsAndHashCode.Exclude + private boolean scrollFinished = false; + + @Override + public OpenSearchResponse search(Function<SearchRequest, SearchResponse> searchAction, + Function<SearchScrollRequest, SearchResponse> scrollAction) { + SearchResponse openSearchResponse = scrollAction.apply(new SearchScrollRequest(initialScrollId) + .scroll(scrollTimeout)); + + // TODO if terminated_early - something went wrong, e.g. no scroll returned. + var response = new OpenSearchResponse(openSearchResponse, exprValueFactory); + // on the last empty page, we should close the scroll + scrollFinished = response.isEmpty(); + responseScrollId = openSearchResponse.getScrollId(); + return response; + } + + @Override + public void clean(Consumer<String> cleanAction) { + if (scrollFinished) { + cleanAction.accept(responseScrollId); + } + } + + @Override + public SearchSourceBuilder getSourceBuilder() { + throw new UnsupportedOperationException( + "SearchSourceBuilder is unavailable for ContinuePageRequest"); + } + + @Override + public String toCursor() { + // on the last page the scroll id is not returned to the user; it is kept only for cleanup (clean) + return scrollFinished ? null : responseScrollId; + } +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/ContinuePageRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/ContinuePageRequestBuilder.java new file mode 100644 index 0000000000..a0c19c1d0a --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/ContinuePageRequestBuilder.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +import lombok.Getter; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; + +/** + * Builds a {@link ContinuePageRequest} to handle subsequent pagination/scroll/cursor requests. + * The initial search request is handled by {@link InitialPageRequestBuilder}. + */ +public class ContinuePageRequestBuilder extends PagedRequestBuilder { + + @Getter + private final OpenSearchRequest.IndexName indexName; + private final String scrollId; + private final TimeValue scrollTimeout; + private final OpenSearchExprValueFactory exprValueFactory; + + /** Constructor. 
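+ * Keeps the scroll id taken from the cursor and reads the scroll keep-alive from {@code Settings.Key.SQL_CURSOR_KEEP_ALIVE};
+ * {@link #build()} wraps them into a {@link ContinuePageRequest}.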
*/ + public ContinuePageRequestBuilder(OpenSearchRequest.IndexName indexName, + String scrollId, + Settings settings, + OpenSearchExprValueFactory exprValueFactory) { + this.indexName = indexName; + this.scrollId = scrollId; + this.scrollTimeout = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + this.exprValueFactory = exprValueFactory; + } + + @Override + public OpenSearchRequest build() { + return new ContinuePageRequest(scrollId, scrollTimeout, exprValueFactory); + } +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/InitialPageRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/InitialPageRequestBuilder.java new file mode 100644 index 0000000000..8023a86006 --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/InitialPageRequestBuilder.java @@ -0,0 +1,71 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +import static org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder.DEFAULT_QUERY_TIMEOUT; + +import java.util.Map; +import java.util.Set; +import lombok.Getter; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.expression.ReferenceExpression; +import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; + +/** + * This builder assists creating the initial OpenSearch paging (scrolling) request. + * It is used only on the first page (pagination request). + * Subsequent requests (cursor requests) use {@link ContinuePageRequestBuilder}. + */ +public class InitialPageRequestBuilder extends PagedRequestBuilder { + + @Getter + private final OpenSearchRequest.IndexName indexName; + private final SearchSourceBuilder sourceBuilder; + private final OpenSearchExprValueFactory exprValueFactory; + private final TimeValue scrollTimeout; + + /** + * Constructor. + * + * @param indexName index being scanned + * @param exprValueFactory value factory + */ + // TODO accept indexName as string (same way as `OpenSearchRequestBuilder` does)? + public InitialPageRequestBuilder(OpenSearchRequest.IndexName indexName, + int pageSize, + Settings settings, + OpenSearchExprValueFactory exprValueFactory) { + this.indexName = indexName; + this.exprValueFactory = exprValueFactory; + this.scrollTimeout = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); + this.sourceBuilder = new SearchSourceBuilder() + .from(0) + .size(pageSize) + .timeout(DEFAULT_QUERY_TIMEOUT); + } + + @Override + public OpenSearchScrollRequest build() { + return new OpenSearchScrollRequest(indexName, scrollTimeout, sourceBuilder, exprValueFactory); + } + + /** + * Push down project expression to OpenSearch. 
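+ * Only the referenced fields are fetched from {@code _source}, and this applies to every page of the scroll.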
+ */ + @Override + public void pushDownProjects(Set projects) { + sourceBuilder.fetchSource(projects.stream().map(ReferenceExpression::getAttr) + .distinct().toArray(String[]::new), new String[0]); + } + + @Override + public void pushTypeMapping(Map typeMapping) { + exprValueFactory.extendTypeMapping(typeMapping); + } +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java index 6f6fea841b..0795ce7cdc 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequest.java @@ -6,6 +6,8 @@ package org.opensearch.sql.opensearch.request; +import static org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder.DEFAULT_QUERY_TIMEOUT; + import com.google.common.annotations.VisibleForTesting; import java.util.function.Consumer; import java.util.function.Function; @@ -15,7 +17,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchScrollRequest; -import org.opensearch.common.unit.TimeValue; import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; @@ -32,11 +33,6 @@ @ToString public class OpenSearchQueryRequest implements OpenSearchRequest { - /** - * Default query timeout in minutes. - */ - public static final TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); - /** * {@link OpenSearchRequest.IndexName}. */ diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java index ce990780c1..c5b6d60af3 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequest.java @@ -50,9 +50,13 @@ OpenSearchResponse search(Function searchAction, */ OpenSearchExprValueFactory getExprValueFactory(); + default String toCursor() { + return ""; + } + /** * OpenSearch Index Name. - * Indices are seperated by ",". + * Indices are separated by ",". 
*/ @EqualsAndHashCode class IndexName { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java index 97aeee3747..6d5a8cf005 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilder.java @@ -9,7 +9,6 @@ import static org.opensearch.search.sort.FieldSortBuilder.DOC_FIELD_NAME; import static org.opensearch.search.sort.SortOrder.ASC; -import com.google.common.collect.Lists; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -26,7 +25,6 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; -import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.sql.ast.expression.Literal; @@ -41,10 +39,10 @@ /** * OpenSearch search request builder. */ -@EqualsAndHashCode +@EqualsAndHashCode(callSuper = false) @Getter @ToString -public class OpenSearchRequestBuilder { +public class OpenSearchRequestBuilder implements PushDownRequestBuilder { /** * Default query timeout in minutes. @@ -74,15 +72,21 @@ public class OpenSearchRequestBuilder { private final OpenSearchExprValueFactory exprValueFactory; /** - * Query size of the request. + * Query size of the request -- how many rows will be returned. */ - private Integer querySize; + private int querySize; + + /** + * Scroll context life time. + */ + private final TimeValue scrollTimeout; public OpenSearchRequestBuilder(String indexName, Integer maxResultWindow, Settings settings, OpenSearchExprValueFactory exprValueFactory) { - this(new OpenSearchRequest.IndexName(indexName), maxResultWindow, settings, exprValueFactory); + this(new OpenSearchRequest.IndexName(indexName), maxResultWindow, settings, + exprValueFactory); } /** @@ -94,12 +98,13 @@ public OpenSearchRequestBuilder(OpenSearchRequest.IndexName indexName, OpenSearchExprValueFactory exprValueFactory) { this.indexName = indexName; this.maxResultWindow = maxResultWindow; - this.sourceBuilder = new SearchSourceBuilder(); this.exprValueFactory = exprValueFactory; + this.scrollTimeout = settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE); this.querySize = settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT); - sourceBuilder.from(0); - sourceBuilder.size(querySize); - sourceBuilder.timeout(DEFAULT_QUERY_TIMEOUT); + this.sourceBuilder = new SearchSourceBuilder() + .from(0) + .size(querySize) + .timeout(DEFAULT_QUERY_TIMEOUT); } /** @@ -111,11 +116,12 @@ public OpenSearchRequest build() { Integer from = sourceBuilder.from(); Integer size = sourceBuilder.size(); - if (from + size <= maxResultWindow) { - return new OpenSearchQueryRequest(indexName, sourceBuilder, exprValueFactory); - } else { + if (from + size > maxResultWindow) { sourceBuilder.size(maxResultWindow - from); - return new OpenSearchScrollRequest(indexName, sourceBuilder, exprValueFactory); + return new OpenSearchScrollRequest( + indexName, scrollTimeout, sourceBuilder, exprValueFactory); + } else { + return new OpenSearchQueryRequest(indexName, sourceBuilder, exprValueFactory); } } @@ -124,7 +130,8 @@ public OpenSearchRequest build() { * * @param query query request */ - public void 
pushDown(QueryBuilder query) { + @Override + public void pushDownFilter(QueryBuilder query) { QueryBuilder current = sourceBuilder.query(); if (current == null) { @@ -149,6 +156,7 @@ public void pushDown(QueryBuilder query) { * * @param aggregationBuilder pair of aggregation query and aggregation parser. */ + @Override public void pushDownAggregation( Pair, OpenSearchAggregationResponseParser> aggregationBuilder) { aggregationBuilder.getLeft().forEach(builder -> sourceBuilder.aggregation(builder)); @@ -161,6 +169,7 @@ public void pushDownAggregation( * * @param sortBuilders sortBuilders. */ + @Override public void pushDownSort(List> sortBuilders) { // TODO: Sort by _doc is added when filter push down. Remove both logic once doctest fixed. if (isSortByDocOnly()) { @@ -175,6 +184,7 @@ public void pushDownSort(List> sortBuilders) { /** * Push down size (limit) and from (offset) to DSL request. */ + @Override public void pushDownLimit(Integer limit, Integer offset) { querySize = limit; sourceBuilder.from(offset).size(limit); @@ -184,6 +194,7 @@ public void pushDownLimit(Integer limit, Integer offset) { * Add highlight to DSL requests. * @param field name of the field to highlight */ + @Override public void pushDownHighlight(String field, Map arguments) { String unquotedField = StringUtils.unquoteText(field); if (sourceBuilder.highlighter() != null) { @@ -214,22 +225,20 @@ public void pushDownHighlight(String field, Map arguments) { } /** - * Push down project list to DSL requets. + * Push down project list to DSL requests. */ + @Override public void pushDownProjects(Set projects) { final Set projectsSet = projects.stream().map(ReferenceExpression::getAttr).collect(Collectors.toSet()); sourceBuilder.fetchSource(projectsSet.toArray(new String[0]), new String[0]); } + @Override public void pushTypeMapping(Map typeMapping) { exprValueFactory.extendTypeMapping(typeMapping); } - private boolean isBoolFilterQuery(QueryBuilder current) { - return (current instanceof BoolQueryBuilder); - } - private boolean isSortByDocOnly() { List> sorts = sourceBuilder.sorts(); if (sorts != null) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java index 4509e443c0..2e723c949c 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequest.java @@ -11,7 +11,6 @@ import java.util.function.Function; import lombok.EqualsAndHashCode; import lombok.Getter; -import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.ToString; import org.opensearch.action.search.SearchRequest; @@ -33,8 +32,8 @@ @ToString public class OpenSearchScrollRequest implements OpenSearchRequest { - /** Default scroll context timeout in minutes. */ - public static final TimeValue DEFAULT_SCROLL_TIMEOUT = TimeValue.timeValueMinutes(1L); + /** Scroll context timeout. */ + private final TimeValue scrollTimeout; /** * {@link OpenSearchRequest.IndexName}. @@ -51,27 +50,21 @@ public class OpenSearchScrollRequest implements OpenSearchRequest { * multi-thread so this state has to be maintained here. */ @Setter + @Getter private String scrollId; + private boolean needClean = false; + /** Search request source builder. */ private final SearchSourceBuilder sourceBuilder; - /** Constructor. 
*/ - public OpenSearchScrollRequest(IndexName indexName, OpenSearchExprValueFactory exprValueFactory) { - this.indexName = indexName; - this.sourceBuilder = new SearchSourceBuilder(); - this.exprValueFactory = exprValueFactory; - } - - public OpenSearchScrollRequest(String indexName, OpenSearchExprValueFactory exprValueFactory) { - this(new IndexName(indexName), exprValueFactory); - } - /** Constructor. */ public OpenSearchScrollRequest(IndexName indexName, + TimeValue scrollTimeout, SearchSourceBuilder sourceBuilder, OpenSearchExprValueFactory exprValueFactory) { this.indexName = indexName; + this.scrollTimeout = scrollTimeout; this.sourceBuilder = sourceBuilder; this.exprValueFactory = exprValueFactory; } @@ -81,21 +74,26 @@ public OpenSearchScrollRequest(IndexName indexName, public OpenSearchResponse search(Function searchAction, Function scrollAction) { SearchResponse openSearchResponse; - if (isScrollStarted()) { + if (isScroll()) { openSearchResponse = scrollAction.apply(scrollRequest()); } else { openSearchResponse = searchAction.apply(searchRequest()); } - setScrollId(openSearchResponse.getScrollId()); - return new OpenSearchResponse(openSearchResponse, exprValueFactory); + var response = new OpenSearchResponse(openSearchResponse, exprValueFactory); + if (!(needClean = response.isEmpty())) { + setScrollId(openSearchResponse.getScrollId()); + } + return response; } @Override public void clean(Consumer cleanAction) { try { - if (isScrollStarted()) { + // clean on the last page only, to prevent closing the scroll/cursor in the middle of paging. + if (needClean && isScroll()) { cleanAction.accept(getScrollId()); + setScrollId(null); } } finally { reset(); @@ -110,7 +108,7 @@ public void clean(Consumer cleanAction) { public SearchRequest searchRequest() { return new SearchRequest() .indices(indexName.getIndexNames()) - .scroll(DEFAULT_SCROLL_TIMEOUT) + .scroll(scrollTimeout) .source(sourceBuilder); } @@ -119,8 +117,8 @@ public SearchRequest searchRequest() { * * @return true if scroll started */ - public boolean isScrollStarted() { - return (scrollId != null); + public boolean isScroll() { + return scrollId != null; } /** @@ -130,7 +128,7 @@ public boolean isScrollStarted() { */ public SearchScrollRequest scrollRequest() { Objects.requireNonNull(scrollId, "Scroll id cannot be null"); - return new SearchScrollRequest().scroll(DEFAULT_SCROLL_TIMEOUT).scrollId(scrollId); + return new SearchScrollRequest().scroll(scrollTimeout).scrollId(scrollId); } /** @@ -140,4 +138,13 @@ public SearchScrollRequest scrollRequest() { public void reset() { scrollId = null; } + + /** + * Convert a scroll request to string that can be included in a cursor. + * @return a string representing the scroll request. 
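+ * (null if the scroll has not been started yet or was reset).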
+ */ + @Override + public String toCursor() { + return scrollId; + } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/PagedRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/PagedRequestBuilder.java new file mode 100644 index 0000000000..69309bd7c9 --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/PagedRequestBuilder.java @@ -0,0 +1,12 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +public abstract class PagedRequestBuilder implements PushDownRequestBuilder { + public abstract OpenSearchRequest build(); + + public abstract OpenSearchRequest.IndexName getIndexName(); +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/request/PushDownRequestBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/PushDownRequestBuilder.java new file mode 100644 index 0000000000..ab1805ce4e --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/request/PushDownRequestBuilder.java @@ -0,0 +1,63 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.Getter; +import org.apache.commons.lang3.tuple.Pair; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.sort.SortBuilder; +import org.opensearch.sql.ast.expression.Literal; +import org.opensearch.sql.data.type.ExprType; +import org.opensearch.sql.expression.ReferenceExpression; +import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; +import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; + +public interface PushDownRequestBuilder { + + default boolean isBoolFilterQuery(QueryBuilder current) { + return (current instanceof BoolQueryBuilder); + } + + private String throwUnsupported(String operation) { + return String.format("%s: push down %s in cursor requests is not supported", + getClass().getSimpleName(), operation); + } + + default void pushDownFilter(QueryBuilder query) { + throw new UnsupportedOperationException(throwUnsupported("filter")); + } + + default void pushDownAggregation( + Pair, OpenSearchAggregationResponseParser> aggregationBuilder) { + throw new UnsupportedOperationException(throwUnsupported("aggregation")); + } + + default void pushDownSort(List> sortBuilders) { + throw new UnsupportedOperationException(throwUnsupported("sort")); + } + + default void pushDownLimit(Integer limit, Integer offset) { + throw new UnsupportedOperationException(throwUnsupported("limit")); + } + + default void pushDownHighlight(String field, Map arguments) { + throw new UnsupportedOperationException(throwUnsupported("highlight")); + } + + default void pushDownProjects(Set projects) { + throw new UnsupportedOperationException(throwUnsupported("projects")); + } + + default void pushTypeMapping(Map typeMapping) { + throw new UnsupportedOperationException(throwUnsupported("type mapping")); + } +} \ No newline at end of file diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java 
b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java index aadd73efdd..61d4459a86 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/response/OpenSearchResponse.java @@ -39,13 +39,13 @@ public class OpenSearchResponse implements Iterable { private final Aggregations aggregations; /** - * ElasticsearchExprValueFactory used to build ExprValue from search result. + * OpenSearchExprValueFactory used to build ExprValue from search result. */ @EqualsAndHashCode.Exclude private final OpenSearchExprValueFactory exprValueFactory; /** - * Constructor of ElasticsearchResponse. + * Constructor of OpenSearchResponse. */ public OpenSearchResponse(SearchResponse searchResponse, OpenSearchExprValueFactory exprValueFactory) { @@ -55,7 +55,7 @@ public OpenSearchResponse(SearchResponse searchResponse, } /** - * Constructor of ElasticsearchResponse with SearchHits. + * Constructor of OpenSearchResponse with SearchHits. */ public OpenSearchResponse(SearchHits hits, OpenSearchExprValueFactory exprValueFactory) { this.hits = hits; @@ -73,6 +73,10 @@ public boolean isEmpty() { return (hits.getHits() == null) || (hits.getHits().length == 0) && aggregations == null; } + public long getTotalHits() { + return hits.getTotalHits().value; + } + public boolean isAggregationResponse() { return aggregations != null; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java index 9ed8adf3ee..110d3d640f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndex.java @@ -19,9 +19,14 @@ import org.opensearch.sql.opensearch.planner.physical.ADOperator; import org.opensearch.sql.opensearch.planner.physical.MLCommonsOperator; import org.opensearch.sql.opensearch.planner.physical.MLOperator; +import org.opensearch.sql.opensearch.request.InitialPageRequestBuilder; import org.opensearch.sql.opensearch.request.OpenSearchRequest; +import org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder; import org.opensearch.sql.opensearch.request.system.OpenSearchDescribeIndexRequest; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScan; import org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScanBuilder; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchPagedIndexScan; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchPagedIndexScanBuilder; import org.opensearch.sql.planner.DefaultImplementor; import org.opensearch.sql.planner.logical.LogicalAD; import org.opensearch.sql.planner.logical.LogicalML; @@ -151,11 +156,21 @@ public LogicalPlan optimize(LogicalPlan plan) { @Override public TableScanBuilder createScanBuilder() { - OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, settings, indexName, - getMaxResultWindow(), new OpenSearchExprValueFactory(getFieldOpenSearchTypes())); + var requestBuilder = new OpenSearchRequestBuilder(indexName, getMaxResultWindow(), + settings, new OpenSearchExprValueFactory(getFieldOpenSearchTypes())); + OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, requestBuilder); + return new OpenSearchIndexScanBuilder(indexScan); } + @Override + public TableScanBuilder createPagedScanBuilder(int pageSize) { + var requestBuilder = new 
InitialPageRequestBuilder(indexName, pageSize, settings, + new OpenSearchExprValueFactory(getFieldOpenSearchTypes())); + var indexScan = new OpenSearchPagedIndexScan(client, requestBuilder); + return new OpenSearchPagedIndexScanBuilder(indexScan); + } + @VisibleForTesting @RequiredArgsConstructor public static class OpenSearchDefaultImplementor diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java index 4a3393abc9..14535edb79 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngine.java @@ -12,9 +12,14 @@ import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; +import org.opensearch.sql.opensearch.request.ContinuePageRequestBuilder; +import org.opensearch.sql.opensearch.request.OpenSearchRequest; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchPagedIndexScan; import org.opensearch.sql.opensearch.storage.system.OpenSearchSystemIndex; import org.opensearch.sql.storage.StorageEngine; import org.opensearch.sql.storage.Table; +import org.opensearch.sql.storage.TableScanOperator; /** OpenSearch storage engine implementation. */ @RequiredArgsConstructor @@ -33,4 +38,15 @@ public Table getTable(DataSourceSchemaName dataSourceSchemaName, String name) { return new OpenSearchIndex(client, settings, name); } } + + @Override + public TableScanOperator getTableScan(String indexName, String scrollId) { + // TODO call `getTable` here? + var index = new OpenSearchIndex(client, settings, indexName); + var requestBuilder = new ContinuePageRequestBuilder( + new OpenSearchRequest.IndexName(indexName), + scrollId, settings, + new OpenSearchExprValueFactory(index.getFieldOpenSearchTypes())); + return new OpenSearchPagedIndexScan(client, requestBuilder); + } } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java similarity index 75% rename from opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java rename to opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java index e9746e1fae..27529bdffd 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScan.java @@ -4,7 +4,7 @@ */ -package org.opensearch.sql.opensearch.storage; +package org.opensearch.sql.opensearch.storage.scan; import java.util.Collections; import java.util.Iterator; @@ -52,25 +52,9 @@ public class OpenSearchIndexScan extends TableScanOperator { /** Search response for current batch. */ private Iterator iterator; - /** - * Constructor. - */ - public OpenSearchIndexScan(OpenSearchClient client, Settings settings, - String indexName, Integer maxResultWindow, - OpenSearchExprValueFactory exprValueFactory) { - this(client, settings, - new OpenSearchRequest.IndexName(indexName),maxResultWindow, exprValueFactory); - } - - /** - * Constructor. 
- */ - public OpenSearchIndexScan(OpenSearchClient client, Settings settings, - OpenSearchRequest.IndexName indexName, Integer maxResultWindow, - OpenSearchExprValueFactory exprValueFactory) { + public OpenSearchIndexScan(OpenSearchClient client, OpenSearchRequestBuilder builder) { this.client = client; - this.requestBuilder = new OpenSearchRequestBuilder( - indexName, maxResultWindow, settings,exprValueFactory); + this.requestBuilder = builder; } @Override @@ -99,6 +83,12 @@ public ExprValue next() { return iterator.next(); } + @Override + public long getTotalHits() { + // Ignore response.getTotalHits(): it reports hits for the entire index, regardless of LIMIT. + return queryCount; + } + private void fetchNextBatch() { OpenSearchResponse response = client.search(request); if (!response.isEmpty()) { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java index e52fc566cd..4571961e5f 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanAggregationBuilder.java @@ -15,10 +15,9 @@ import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; -import org.opensearch.sql.opensearch.storage.OpenSearchIndexScan; import org.opensearch.sql.opensearch.storage.script.aggregation.AggregationQueryBuilder; -import org.opensearch.sql.opensearch.storage.serialization.DefaultExpressionSerializer; import org.opensearch.sql.planner.logical.LogicalAggregation; import org.opensearch.sql.planner.logical.LogicalSort; import org.opensearch.sql.storage.TableScanOperator; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java index d7483cfcf0..41edbfc768 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanBuilder.java @@ -8,7 +8,6 @@ import com.google.common.annotations.VisibleForTesting; import lombok.EqualsAndHashCode; import org.opensearch.sql.expression.ReferenceExpression; -import org.opensearch.sql.opensearch.storage.OpenSearchIndexScan; import org.opensearch.sql.planner.logical.LogicalAggregation; import org.opensearch.sql.planner.logical.LogicalFilter; import org.opensearch.sql.planner.logical.LogicalHighlight; diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java index 7190d58000..f2e5139d01 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanQueryBuilder.java @@ -20,10 +20,9 @@ import org.opensearch.sql.expression.ExpressionNodeVisitor; import 
org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; -import org.opensearch.sql.opensearch.storage.OpenSearchIndexScan; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; import org.opensearch.sql.opensearch.storage.script.filter.FilterQueryBuilder; import org.opensearch.sql.opensearch.storage.script.sort.SortQueryBuilder; -import org.opensearch.sql.opensearch.storage.serialization.DefaultExpressionSerializer; import org.opensearch.sql.planner.logical.LogicalFilter; import org.opensearch.sql.planner.logical.LogicalHighlight; import org.opensearch.sql.planner.logical.LogicalLimit; @@ -62,7 +61,7 @@ public boolean pushDownFilter(LogicalFilter filter) { FilterQueryBuilder queryBuilder = new FilterQueryBuilder( new DefaultExpressionSerializer()); QueryBuilder query = queryBuilder.build(filter.getCondition()); - indexScan.getRequestBuilder().pushDown(query); + indexScan.getRequestBuilder().pushDownFilter(query); return true; } diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScan.java new file mode 100644 index 0000000000..e9d3fd52d3 --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScan.java @@ -0,0 +1,84 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.storage.scan; + +import java.util.Collections; +import java.util.Iterator; +import lombok.EqualsAndHashCode; +import lombok.ToString; +import org.apache.commons.lang3.NotImplementedException; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.request.OpenSearchRequest; +import org.opensearch.sql.opensearch.request.PagedRequestBuilder; +import org.opensearch.sql.opensearch.response.OpenSearchResponse; +import org.opensearch.sql.storage.TableScanOperator; + +@EqualsAndHashCode(onlyExplicitlyIncluded = true, callSuper = false) +@ToString(onlyExplicitlyIncluded = true) +public class OpenSearchPagedIndexScan extends TableScanOperator { + private final OpenSearchClient client; + private final PagedRequestBuilder requestBuilder; + @EqualsAndHashCode.Include + @ToString.Include + private OpenSearchRequest request; + private Iterator iterator; + private long totalHits = 0; + + public OpenSearchPagedIndexScan(OpenSearchClient client, + PagedRequestBuilder requestBuilder) { + this.client = client; + this.requestBuilder = requestBuilder; + } + + @Override + public String explain() { + throw new NotImplementedException("Implement OpenSearchPagedIndexScan.explain"); + } + + @Override + public boolean hasNext() { + return iterator.hasNext(); + } + + @Override + public ExprValue next() { + return iterator.next(); + } + + @Override + public void open() { + super.open(); + request = requestBuilder.build(); + OpenSearchResponse response = client.search(request); + if (!response.isEmpty()) { + iterator = response.iterator(); + totalHits = response.getTotalHits(); + } else { + iterator = Collections.emptyIterator(); + } + } + + @Override + public void close() { + super.close(); + client.cleanup(request); + } + + @Override + public long getTotalHits() { + return totalHits; + } + + @Override + public String toCursor() { + // TODO this assumes exactly one index is scanned. 
+ var indexName = requestBuilder.getIndexName().getIndexNames()[0]; + var cursor = request.toCursor(); + return cursor == null || cursor.isEmpty() + ? "" : createSection("OpenSearchPagedIndexScan", indexName, cursor); + } +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScanBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScanBuilder.java new file mode 100644 index 0000000000..779df4ebec --- /dev/null +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScanBuilder.java @@ -0,0 +1,29 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.storage.scan; + +import lombok.EqualsAndHashCode; +import org.opensearch.sql.storage.TableScanOperator; +import org.opensearch.sql.storage.read.TableScanBuilder; + +/** + * Builder for a paged OpenSearch request. + * Override pushDown* methods from TableScanBuilder as more features + * support pagination. + */ +public class OpenSearchPagedIndexScanBuilder extends TableScanBuilder { + @EqualsAndHashCode.Include + OpenSearchPagedIndexScan indexScan; + + public OpenSearchPagedIndexScanBuilder(OpenSearchPagedIndexScan indexScan) { + this.indexScan = indexScan; + } + + @Override + public TableScanOperator build() { + return indexScan; + } +} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java index 855aae645d..9e8b47f6b0 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngine.java @@ -16,9 +16,9 @@ import org.opensearch.script.ScriptContext; import org.opensearch.script.ScriptEngine; import org.opensearch.sql.expression.Expression; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.storage.script.aggregation.ExpressionAggregationScriptFactory; import org.opensearch.sql.opensearch.storage.script.filter.ExpressionFilterScriptFactory; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** * Custom expression script engine that supports using core engine expression code in DSL diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java index 1efa5b65d5..bc9741dee5 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilder.java @@ -24,13 +24,12 @@ import org.opensearch.search.aggregations.bucket.missing.MissingOrder; import org.opensearch.search.sort.SortOrder; import org.opensearch.sql.ast.tree.Sort; -import org.opensearch.sql.data.type.ExprCoreType; -import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.expression.Expression; import org.opensearch.sql.expression.ExpressionNodeVisitor; import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import 
org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.response.agg.CompositeAggregationParser; import org.opensearch.sql.opensearch.response.agg.MetricParser; @@ -38,7 +37,6 @@ import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.BucketAggregationBuilder; import org.opensearch.sql.opensearch.storage.script.aggregation.dsl.MetricAggregationBuilder; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** * Build the AggregationBuilder from the list of {@link NamedAggregator} diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java index 156b565976..83dd927632 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/AggregationBuilderHelper.java @@ -17,8 +17,8 @@ import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.LiteralExpression; import org.opensearch.sql.expression.ReferenceExpression; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** * Abstract Aggregation Builder. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java index 1a6a82be96..215be3b356 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilder.java @@ -23,8 +23,8 @@ import org.opensearch.search.sort.SortOrder; import org.opensearch.sql.ast.expression.SpanUnit; import org.opensearch.sql.expression.NamedExpression; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.expression.span.SpanExpression; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** * Bucket Aggregation Builder. 
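For orientation, here is a minimal, illustrative sketch (not part of the patch) of how the paged-scan pieces introduced above are expected to fit together: an InitialPageRequestBuilder drives the first page through OpenSearchPagedIndexScan, and a ContinuePageRequestBuilder resumes a later page from a scroll id, mirroring what OpenSearchIndex.createPagedScanBuilder() and OpenSearchStorageEngine.getTableScan() do in this change set. It assumes an already-constructed OpenSearchClient, Settings (with SQL_CURSOR_KEEP_ALIVE configured), and OpenSearchExprValueFactory; the index name "test", the page size of 5, and the PagedScanSketch / firstPage / nextPage / scrollIdFromCursor names are placeholders, not identifiers from the patch.

import org.opensearch.sql.common.setting.Settings;
import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.opensearch.client.OpenSearchClient;
import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory;
import org.opensearch.sql.opensearch.request.ContinuePageRequestBuilder;
import org.opensearch.sql.opensearch.request.InitialPageRequestBuilder;
import org.opensearch.sql.opensearch.request.OpenSearchRequest;
import org.opensearch.sql.opensearch.storage.scan.OpenSearchPagedIndexScan;

class PagedScanSketch {

  // First page: OpenSearchIndex.createPagedScanBuilder(pageSize) wraps an
  // InitialPageRequestBuilder in an OpenSearchPagedIndexScan.
  static String firstPage(OpenSearchClient client, Settings settings,
                          OpenSearchExprValueFactory factory) {
    var index = new OpenSearchRequest.IndexName("test");
    var scan = new OpenSearchPagedIndexScan(
        client, new InitialPageRequestBuilder(index, 5, settings, factory));
    scan.open();                       // builds the scroll request and fetches the first page
    while (scan.hasNext()) {
      ExprValue row = scan.next();     // each page yields at most pageSize rows
    }
    String cursor = scan.toCursor();   // empty string when there is no scroll to continue
    scan.close();                      // lets the client clean up the scroll context if needed
    return cursor;
  }

  // Later page: OpenSearchStorageEngine.getTableScan(indexName, scrollId) resumes the
  // scroll via a ContinuePageRequestBuilder built from the scroll id carried in the cursor.
  static OpenSearchPagedIndexScan nextPage(OpenSearchClient client, Settings settings,
                                           OpenSearchExprValueFactory factory,
                                           String scrollIdFromCursor) {
    var index = new OpenSearchRequest.IndexName("test");
    return new OpenSearchPagedIndexScan(
        client, new ContinuePageRequestBuilder(index, scrollIdFromCursor, settings, factory));
  }
}

As the executor changes further down suggest, the cursor text produced by the scan is not returned to clients directly; it is packaged into the QueryResponse through the PaginatedPlanCache now passed to OpenSearchExecutionEngine.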
diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java index 5e7d34abce..db8d1fdf1e 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilder.java @@ -25,13 +25,13 @@ import org.opensearch.sql.expression.LiteralExpression; import org.opensearch.sql.expression.ReferenceExpression; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.response.agg.FilterParser; import org.opensearch.sql.opensearch.response.agg.MetricParser; import org.opensearch.sql.opensearch.response.agg.SingleValueParser; import org.opensearch.sql.opensearch.response.agg.StatsParser; import org.opensearch.sql.opensearch.response.agg.TopHitsParser; import org.opensearch.sql.opensearch.storage.script.filter.FilterQueryBuilder; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; /** * Build the Metric Aggregation and List of {@link MetricParser} from {@link NamedAggregator}. diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java index 5f36954d4a..a82869ec03 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilder.java @@ -24,6 +24,7 @@ import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.function.BuiltinFunctionName; import org.opensearch.sql.expression.function.FunctionName; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.storage.script.filter.lucene.LikeQuery; import org.opensearch.sql.opensearch.storage.script.filter.lucene.LuceneQuery; import org.opensearch.sql.opensearch.storage.script.filter.lucene.RangeQuery; @@ -38,7 +39,6 @@ import org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance.QueryStringQuery; import org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance.SimpleQueryStringQuery; import org.opensearch.sql.opensearch.storage.script.filter.lucene.relevance.WildcardQuery; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; @RequiredArgsConstructor public class FilterQueryBuilder extends ExpressionNodeVisitor { diff --git a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java index eb4cb865e2..eba5eb126d 100644 --- a/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java +++ b/opensearch/src/main/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScan.java @@ -31,9 +31,13 @@ public class OpenSearchSystemIndexScan extends TableScanOperator { */ private Iterator iterator; + private long totalHits = 0; + @Override public void open() { - iterator = request.search().iterator(); + var response = 
request.search(); + totalHits = response.size(); + iterator = response.iterator(); } @Override @@ -46,6 +50,11 @@ public ExprValue next() { return iterator.next(); } + @Override + public long getTotalHits() { + return totalHits; + } + @Override public String explain() { return request.toString(); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java index 1c79a28f3f..dc9d7a5b5e 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchNodeClientTest.java @@ -34,8 +34,12 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import lombok.SneakyThrows; +import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.lucene.search.TotalHits; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.InOrder; @@ -57,6 +61,7 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; @@ -65,6 +70,7 @@ import org.opensearch.index.IndexNotFoundException; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; +import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.data.model.ExprValue; @@ -72,10 +78,12 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.mapping.IndexMapping; +import org.opensearch.sql.opensearch.request.OpenSearchRequest; import org.opensearch.sql.opensearch.request.OpenSearchScrollRequest; import org.opensearch.sql.opensearch.response.OpenSearchResponse; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchNodeClientTest { private static final String TEST_MAPPING_FILE = "mappings/accounts.json"; @@ -107,7 +115,7 @@ void setUp() { } @Test - void isIndexExist() { + void is_index_exist() { when(nodeClient.admin().indices() .exists(any(IndicesExistsRequest.class)).actionGet()) .thenReturn(new IndicesExistsResponse(true)); @@ -116,7 +124,7 @@ void isIndexExist() { } @Test - void isIndexNotExist() { + void is_index_not_exist() { String indexName = "test"; when(nodeClient.admin().indices() .exists(any(IndicesExistsRequest.class)).actionGet()) @@ -126,14 +134,14 @@ void isIndexNotExist() { } @Test - void isIndexExistWithException() { + void is_index_exist_with_exception() { when(nodeClient.admin().indices().exists(any())).thenThrow(RuntimeException.class); assertThrows(IllegalStateException.class, () -> client.exists("test")); } @Test - void createIndex() { + void create_index() { String indexName = "test"; Map mappings = ImmutableMap.of( "properties", @@ -146,7 +154,7 @@ void createIndex() { } @Test - void 
createIndexWithException() { + void create_index_with_exception() { when(nodeClient.admin().indices().create(any())).thenThrow(RuntimeException.class); assertThrows(IllegalStateException.class, @@ -154,7 +162,7 @@ void createIndexWithException() { } @Test - void getIndexMappings() throws IOException { + void get_index_mappings() throws IOException { URL url = Resources.getResource(TEST_MAPPING_FILE); String mappings = Resources.toString(url, Charsets.UTF_8); String indexName = "test"; @@ -225,7 +233,7 @@ void getIndexMappings() throws IOException { } @Test - void getIndexMappingsWithEmptyMapping() { + void get_index_mappings_with_empty_mapping() { String indexName = "test"; mockNodeClientIndicesMappings(indexName, ""); Map indexMappings = client.getIndexMappings(indexName); @@ -236,7 +244,7 @@ void getIndexMappingsWithEmptyMapping() { } @Test - void getIndexMappingsWithIOException() { + void get_index_mappings_with_IOException() { String indexName = "test"; when(nodeClient.admin().indices()).thenThrow(RuntimeException.class); @@ -244,7 +252,7 @@ void getIndexMappingsWithIOException() { } @Test - void getIndexMappingsWithNonExistIndex() { + void get_index_mappings_with_non_exist_index() { when(nodeClient.admin().indices() .prepareGetMappings(any()) .setLocal(anyBoolean()) @@ -255,7 +263,7 @@ void getIndexMappingsWithNonExistIndex() { } @Test - void getIndexMaxResultWindows() throws IOException { + void get_index_max_result_windows() throws IOException { URL url = Resources.getResource(TEST_MAPPING_SETTINGS_FILE); String indexMetadata = Resources.toString(url, Charsets.UTF_8); String indexName = "accounts"; @@ -269,7 +277,7 @@ void getIndexMaxResultWindows() throws IOException { } @Test - void getIndexMaxResultWindowsWithDefaultSettings() throws IOException { + void get_index_max_result_windows_with_default_settings() throws IOException { URL url = Resources.getResource(TEST_MAPPING_FILE); String indexMetadata = Resources.toString(url, Charsets.UTF_8); String indexName = "accounts"; @@ -283,7 +291,7 @@ void getIndexMaxResultWindowsWithDefaultSettings() throws IOException { } @Test - void getIndexMaxResultWindowsWithIOException() { + void get_index_max_result_windows_with_IOException() { String indexName = "test"; when(nodeClient.admin().indices()).thenThrow(RuntimeException.class); @@ -292,7 +300,7 @@ void getIndexMaxResultWindowsWithIOException() { /** Jacoco enforce this constant lambda be tested. 
*/ @Test - void testAllFieldsPredicate() { + void test_all_fields_predicate() { assertTrue(OpenSearchNodeClient.ALL_FIELDS.apply("any_index").test("any_field")); } @@ -314,11 +322,12 @@ void search() { // Mock second scroll request followed SearchResponse scrollResponse = mock(SearchResponse.class); when(nodeClient.searchScroll(any()).actionGet()).thenReturn(scrollResponse); - when(scrollResponse.getScrollId()).thenReturn("scroll456"); when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -328,6 +337,7 @@ void search() { assertFalse(hits.hasNext()); // Verify response for second scroll request + request.setScrollId("scroll123"); OpenSearchResponse response2 = client.search(request); assertTrue(response2.isEmpty()); } @@ -343,16 +353,21 @@ void schedule() { } @Test + @SneakyThrows void cleanup() { ClearScrollRequestBuilder requestBuilder = mock(ClearScrollRequestBuilder.class); when(nodeClient.prepareClearScroll()).thenReturn(requestBuilder); when(requestBuilder.addScrollId(any())).thenReturn(requestBuilder); when(requestBuilder.get()).thenReturn(null); - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); request.setScrollId("scroll123"); + // Enforce cleaning by setting a private field. + FieldUtils.writeField(request, "needClean", true, true); client.cleanup(request); - assertFalse(request.isScrollStarted()); + assertFalse(request.isScroll()); InOrder inOrder = Mockito.inOrder(nodeClient, requestBuilder); inOrder.verify(nodeClient).prepareClearScroll(); @@ -361,14 +376,30 @@ void cleanup() { } @Test - void cleanupWithoutScrollId() { - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + void cleanup_without_scrollId() { + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); client.cleanup(request); verify(nodeClient, never()).prepareClearScroll(); } @Test - void getIndices() { + @SneakyThrows + void cleanup_rethrows_exception() { + when(nodeClient.prepareClearScroll()).thenThrow(new RuntimeException()); + + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); + request.setScrollId("scroll123"); + // Enforce cleaning by setting a private field. 
+ FieldUtils.writeField(request, "needClean", true, true); + assertThrows(IllegalStateException.class, () -> client.cleanup(request)); + } + + @Test + void get_indices() { AliasMetadata aliasMetadata = mock(AliasMetadata.class); ImmutableOpenMap.Builder> builder = ImmutableOpenMap.builder(); builder.fPut("index",Arrays.asList(aliasMetadata)); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java index f2da6fd1e0..6abd17a6fb 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/client/OpenSearchRestClientTest.java @@ -30,8 +30,12 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import lombok.SneakyThrows; +import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.lucene.search.TotalHits; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -51,12 +55,14 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; +import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.sql.data.model.ExprIntegerValue; import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.data.model.ExprValue; @@ -64,10 +70,12 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.mapping.IndexMapping; +import org.opensearch.sql.opensearch.request.OpenSearchRequest; import org.opensearch.sql.opensearch.request.OpenSearchScrollRequest; import org.opensearch.sql.opensearch.response.OpenSearchResponse; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchRestClientTest { private static final String TEST_MAPPING_FILE = "mappings/accounts.json"; @@ -95,7 +103,7 @@ void setUp() { } @Test - void isIndexExist() throws IOException { + void is_index_exist() throws IOException { when(restClient.indices() .exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(true); @@ -104,7 +112,7 @@ void isIndexExist() throws IOException { } @Test - void isIndexNotExist() throws IOException { + void is_index_not_exist() throws IOException { when(restClient.indices() .exists(any(), any())) // use any() because missing equals() in GetIndexRequest .thenReturn(false); @@ -113,14 +121,14 @@ void isIndexNotExist() throws IOException { } @Test - void isIndexExistWithException() throws IOException { + void is_index_exist_with_exception() throws IOException { when(restClient.indices().exists(any(), any())).thenThrow(IOException.class); assertThrows(IllegalStateException.class, () -> client.exists("test")); } @Test - void 
createIndex() throws IOException { + void create_index() throws IOException { String indexName = "test"; Map mappings = ImmutableMap.of( "properties", @@ -133,7 +141,7 @@ void createIndex() throws IOException { } @Test - void createIndexWithIOException() throws IOException { + void create_index_with_IOException() throws IOException { when(restClient.indices().create(any(), any())).thenThrow(IOException.class); assertThrows(IllegalStateException.class, @@ -141,7 +149,7 @@ void createIndexWithIOException() throws IOException { } @Test - void getIndexMappings() throws IOException { + void get_index_mappings() throws IOException { URL url = Resources.getResource(TEST_MAPPING_FILE); String mappings = Resources.toString(url, Charsets.UTF_8); String indexName = "test"; @@ -216,14 +224,14 @@ void getIndexMappings() throws IOException { } @Test - void getIndexMappingsWithIOException() throws IOException { + void get_index_mappings_with_IOException() throws IOException { when(restClient.indices().getMapping(any(GetMappingsRequest.class), any())) .thenThrow(new IOException()); assertThrows(IllegalStateException.class, () -> client.getIndexMappings("test")); } @Test - void getIndexMaxResultWindowsSettings() throws IOException { + void get_index_max_result_windows_settings() throws IOException { String indexName = "test"; Integer maxResultWindow = 1000; @@ -247,7 +255,7 @@ void getIndexMaxResultWindowsSettings() throws IOException { } @Test - void getIndexMaxResultWindowsDefaultSettings() throws IOException { + void get_index_max_result_windows_default_settings() throws IOException { String indexName = "test"; Integer maxResultWindow = 10000; @@ -271,7 +279,7 @@ void getIndexMaxResultWindowsDefaultSettings() throws IOException { } @Test - void getIndexMaxResultWindowsWithIOException() throws IOException { + void get_index_max_result_windows_with_IOException() throws IOException { when(restClient.indices().getSettings(any(GetSettingsRequest.class), any())) .thenThrow(new IOException()); assertThrows(IllegalStateException.class, () -> client.getIndexMaxResultWindows("test")); @@ -295,11 +303,12 @@ void search() throws IOException { // Mock second scroll request followed SearchResponse scrollResponse = mock(SearchResponse.class); when(restClient.scroll(any(), any())).thenReturn(scrollResponse); - when(scrollResponse.getScrollId()).thenReturn("scroll456"); when(scrollResponse.getHits()).thenReturn(SearchHits.empty()); // Verify response for first scroll request - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); OpenSearchResponse response1 = client.search(request); assertFalse(response1.isEmpty()); @@ -309,20 +318,23 @@ void search() throws IOException { assertFalse(hits.hasNext()); // Verify response for second scroll request + request.setScrollId("scroll123"); OpenSearchResponse response2 = client.search(request); assertTrue(response2.isEmpty()); } @Test - void searchWithIOException() throws IOException { + void search_with_IOException() throws IOException { when(restClient.search(any(), any())).thenThrow(new IOException()); assertThrows( IllegalStateException.class, - () -> client.search(new OpenSearchScrollRequest("test", factory))); + () -> client.search(new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory))); 
} @Test - void scrollWithIOException() throws IOException { + void scroll_with_IOException() throws IOException { // Mock first scroll request SearchResponse searchResponse = mock(SearchResponse.class); when(restClient.search(any(), any())).thenReturn(searchResponse); @@ -338,7 +350,9 @@ void scrollWithIOException() throws IOException { when(restClient.scroll(any(), any())).thenThrow(new IOException()); // First request run successfully - OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest("test", factory); + OpenSearchScrollRequest scrollRequest = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); client.search(scrollRequest); assertThrows( IllegalStateException.class, () -> client.search(scrollRequest)); @@ -355,32 +369,44 @@ void schedule() { } @Test - void cleanup() throws IOException { - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + @SneakyThrows + void cleanup() { + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); + // Enforce cleaning by setting a private field. + FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); client.cleanup(request); verify(restClient).clearScroll(any(), any()); - assertFalse(request.isScrollStarted()); + assertFalse(request.isScroll()); } @Test - void cleanupWithoutScrollId() throws IOException { - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + void cleanup_without_scrollId() throws IOException { + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); client.cleanup(request); verify(restClient, never()).clearScroll(any(), any()); } @Test - void cleanupWithIOException() throws IOException { + @SneakyThrows + void cleanup_with_IOException() { when(restClient.clearScroll(any(), any())).thenThrow(new IOException()); - OpenSearchScrollRequest request = new OpenSearchScrollRequest("test", factory); + OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); + // Enforce cleaning by setting a private field. 
+ FieldUtils.writeField(request, "needClean", true, true); request.setScrollId("scroll123"); assertThrows(IllegalStateException.class, () -> client.cleanup(request)); } @Test - void getIndices() throws IOException { + void get_indices() throws IOException { when(restClient.indices().get(any(GetIndexRequest.class), any(RequestOptions.class))) .thenReturn(getIndexResponse); when(getIndexResponse.getIndices()).thenReturn(new String[] {"index"}); @@ -390,7 +416,7 @@ void getIndices() throws IOException { } @Test - void getIndicesWithIOException() throws IOException { + void get_indices_with_IOException() throws IOException { when(restClient.indices().get(any(GetIndexRequest.class), any(RequestOptions.class))) .thenThrow(new IOException()); assertThrows(IllegalStateException.class, () -> client.indices()); @@ -409,7 +435,7 @@ void meta() throws IOException { } @Test - void metaWithIOException() throws IOException { + void meta_with_IOException() throws IOException { when(restClient.cluster().getSettings(any(), any(RequestOptions.class))) .thenThrow(new IOException()); @@ -417,7 +443,7 @@ void metaWithIOException() throws IOException { } @Test - void mlWithException() { + void ml_with_exception() { assertThrows(UnsupportedOperationException.class, () -> client.getNodeClient()); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java index 4a0c6e24f1..d762fbe2fa 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/OpenSearchExecutionEngineTest.java @@ -18,6 +18,7 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.opensearch.sql.common.setting.Settings.Key.QUERY_SIZE_LIMIT; +import static org.opensearch.sql.common.setting.Settings.Key.SQL_CURSOR_KEEP_ALIVE; import static org.opensearch.sql.data.model.ExprValueUtils.tupleValue; import static org.opensearch.sql.executor.ExecutionEngine.QueryResponse; @@ -25,30 +26,36 @@ import java.util.Arrays; import java.util.Iterator; import java.util.List; -import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import lombok.RequiredArgsConstructor; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.common.unit.TimeValue; import org.opensearch.sql.common.response.ResponseListener; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.executor.ExecutionContext; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.ExecutionEngine.ExplainResponse; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; import org.opensearch.sql.opensearch.client.OpenSearchClient; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.executor.protector.OpenSearchExecutionProtector; -import org.opensearch.sql.opensearch.storage.OpenSearchIndexScan; +import org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder; +import 
org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScan; +import org.opensearch.sql.planner.physical.PaginateOperator; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.storage.TableScanOperator; import org.opensearch.sql.storage.split.Split; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchExecutionEngineTest { @Mock private OpenSearchClient client; @@ -75,14 +82,15 @@ void setUp() { } @Test - void executeSuccessfully() { + void execute_successfully() { List expected = Arrays.asList( tupleValue(of("name", "John", "age", 20)), tupleValue(of("name", "Allen", "age", 30))); FakePhysicalPlan plan = new FakePhysicalPlan(expected.iterator()); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector); + OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, + new PaginatedPlanCache(null)); List actual = new ArrayList<>(); executor.execute( plan, @@ -104,13 +112,43 @@ public void onFailure(Exception e) { } @Test - void executeWithFailure() { + void execute_with_cursor() { + List expected = + Arrays.asList( + tupleValue(of("name", "John", "age", 20)), tupleValue(of("name", "Allen", "age", 30))); + FakePaginatePlan plan = new FakePaginatePlan(new FakePhysicalPlan(expected.iterator()), 10, 0); + when(protector.protect(plan)).thenReturn(plan); + + OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, + new PaginatedPlanCache(null)); + List actual = new ArrayList<>(); + executor.execute( + plan, + new ResponseListener() { + @Override + public void onResponse(QueryResponse response) { + actual.addAll(response.getResults()); + assertTrue(response.getCursor().toString().startsWith("n:")); + } + + @Override + public void onFailure(Exception e) { + fail("Error occurred during execution", e); + } + }); + + assertEquals(expected, actual); + } + + @Test + void execute_with_failure() { PhysicalPlan plan = mock(PhysicalPlan.class); RuntimeException expected = new RuntimeException("Execution error"); when(plan.hasNext()).thenThrow(expected); when(protector.protect(plan)).thenReturn(plan); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector); + OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, + new PaginatedPlanCache(null)); AtomicReference actual = new AtomicReference<>(); executor.execute( plan, @@ -130,12 +168,17 @@ public void onFailure(Exception e) { } @Test - void explainSuccessfully() { - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector); + void explain_successfully() { + OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, + new PaginatedPlanCache(null)); Settings settings = mock(Settings.class); when(settings.getSettingValue(QUERY_SIZE_LIMIT)).thenReturn(100); + when(settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); + PhysicalPlan plan = new OpenSearchIndexScan(mock(OpenSearchClient.class), - settings, "test", 10000, mock(OpenSearchExprValueFactory.class)); + new OpenSearchRequestBuilder("test", 10000, settings, + mock(OpenSearchExprValueFactory.class))); AtomicReference result = new AtomicReference<>(); executor.explain(plan, new ResponseListener() { @@ -154,8 +197,9 @@ public void onFailure(Exception e) { } @Test - void explainWithFailure() { - OpenSearchExecutionEngine 
executor = new OpenSearchExecutionEngine(client, protector); + void explain_with_failure() { + OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, + new PaginatedPlanCache(null)); PhysicalPlan plan = mock(PhysicalPlan.class); when(plan.accept(any(), any())).thenThrow(IllegalStateException.class); @@ -176,7 +220,7 @@ public void onFailure(Exception e) { } @Test - void callAddSplitAndOpenInOrder() { + void call_add_split_and_open_in_order() { List expected = Arrays.asList( tupleValue(of("name", "John", "age", 20)), tupleValue(of("name", "Allen", "age", 30))); @@ -184,7 +228,8 @@ void callAddSplitAndOpenInOrder() { when(protector.protect(plan)).thenReturn(plan); when(executionContext.getSplit()).thenReturn(Optional.of(split)); - OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector); + OpenSearchExecutionEngine executor = new OpenSearchExecutionEngine(client, protector, + new PaginatedPlanCache(null)); List actual = new ArrayList<>(); executor.execute( plan, @@ -207,6 +252,54 @@ public void onFailure(Exception e) { assertTrue(plan.hasClosed); } + private static class FakePaginatePlan extends PaginateOperator { + private final PhysicalPlan input; + private final int pageSize; + private final int pageIndex; + + public FakePaginatePlan(PhysicalPlan input, int pageSize, int pageIndex) { + super(input, pageSize, pageIndex); + this.input = input; + this.pageSize = pageSize; + this.pageIndex = pageIndex; + } + + @Override + public void open() { + input.open(); + } + + @Override + public void close() { + input.close(); + } + + @Override + public void add(Split split) { + input.add(split); + } + + @Override + public boolean hasNext() { + return input.hasNext(); + } + + @Override + public ExprValue next() { + return input.next(); + } + + @Override + public ExecutionEngine.Schema schema() { + return input.schema(); + } + + @Override + public String toCursor() { + return "FakePaginatePlan"; + } + } + @RequiredArgsConstructor private static class FakePhysicalPlan extends TableScanOperator { private final Iterator it; diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java index d4d987a7df..7b1353f4a9 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/ResourceMonitorPlanTest.java @@ -107,4 +107,16 @@ void acceptSuccess() { monitorPlan.accept(visitor, context); verify(plan, times(1)).accept(visitor, context); } + + @Test + void getTotalHitsSuccess() { + monitorPlan.getTotalHits(); + verify(plan, times(1)).getTotalHits(); + } + + @Test + void toCursorSuccess() { + monitorPlan.toCursor(); + verify(plan, times(1)).toCursor(); + } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java index 857ff601e1..cf684b9409 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/executor/protector/OpenSearchExecutionProtectorTest.java @@ -11,6 +11,8 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static 
org.opensearch.sql.ast.tree.Sort.SortOption.DEFAULT_ASC; +import static org.opensearch.sql.common.setting.Settings.Key.QUERY_SIZE_LIMIT; +import static org.opensearch.sql.common.setting.Settings.Key.SQL_CURSOR_KEEP_ALIVE; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; import static org.opensearch.sql.data.type.ExprCoreType.STRING; @@ -36,6 +38,7 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.client.node.NodeClient; +import org.opensearch.common.unit.TimeValue; import org.opensearch.sql.ast.expression.DataType; import org.opensearch.sql.ast.expression.Literal; import org.opensearch.sql.ast.tree.RareTopN.CommandType; @@ -57,8 +60,10 @@ import org.opensearch.sql.opensearch.planner.physical.ADOperator; import org.opensearch.sql.opensearch.planner.physical.MLCommonsOperator; import org.opensearch.sql.opensearch.planner.physical.MLOperator; +import org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder; import org.opensearch.sql.opensearch.setting.OpenSearchSettings; -import org.opensearch.sql.opensearch.storage.OpenSearchIndexScan; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScan; +import org.opensearch.sql.planner.physical.PaginateOperator; import org.opensearch.sql.planner.physical.PhysicalPlan; import org.opensearch.sql.planner.physical.PhysicalPlanDSL; @@ -86,8 +91,9 @@ public void setup() { @Test public void testProtectIndexScan() { - when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); - + when(settings.getSettingValue(QUERY_SIZE_LIMIT)).thenReturn(200); + when(settings.getSettingValue(SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); String indexName = "test"; Integer maxResultWindow = 10000; NamedExpression include = named("age", ref("age", INTEGER)); @@ -124,9 +130,11 @@ public void testProtectIndexScan() { PhysicalPlanDSL.agg( filter( resourceMonitor( - new OpenSearchIndexScan( - client, settings, indexName, - maxResultWindow, exprValueFactory)), + new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder(indexName, + maxResultWindow, + settings, + exprValueFactory))), filterExpr), aggregators, groupByExprs), @@ -152,9 +160,11 @@ public void testProtectIndexScan() { PhysicalPlanDSL.rename( PhysicalPlanDSL.agg( filter( - new OpenSearchIndexScan( - client, settings, indexName, - maxResultWindow, exprValueFactory), + new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder(indexName, + maxResultWindow, + settings, + exprValueFactory)), filterExpr), aggregators, groupByExprs), @@ -314,6 +324,13 @@ public void testVisitML() { executionProtector.visitML(mlOperator, null)); } + @Test + public void visitPaginate() { + var paginate = new PaginateOperator(values(List.of()), 42); + assertEquals(executionProtector.protect(paginate), + executionProtector.visitPaginate(paginate, null)); + } + PhysicalPlan resourceMonitor(PhysicalPlan input) { return new ResourceMonitorPlan(input, resourceMonitor); } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/ContinuePageRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/ContinuePageRequestBuilderTest.java new file mode 100644 index 0000000000..e449126d1c --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/ContinuePageRequestBuilderTest.java @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + 
+package org.opensearch.sql.opensearch.request; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +public class ContinuePageRequestBuilderTest { + + @Mock + private OpenSearchExprValueFactory exprValueFactory; + + @Mock + private Settings settings; + + private final OpenSearchRequest.IndexName indexName = new OpenSearchRequest.IndexName("test"); + private final String scrollId = "scroll"; + + private ContinuePageRequestBuilder requestBuilder; + + @BeforeEach + void setup() { + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); + requestBuilder = new ContinuePageRequestBuilder( + indexName, scrollId, settings, exprValueFactory); + } + + @Test + public void build() { + assertEquals( + new ContinuePageRequest(scrollId, TimeValue.timeValueMinutes(1), exprValueFactory), + requestBuilder.build() + ); + } + + @Test + public void getIndexName() { + assertEquals(indexName, requestBuilder.getIndexName()); + } +} diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/ContinuePageRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/ContinuePageRequestTest.java new file mode 100644 index 0000000000..e991fc5787 --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/ContinuePageRequestTest.java @@ -0,0 +1,126 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.function.Consumer; +import java.util.function.Function; +import lombok.SneakyThrows; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.common.unit.TimeValue; +import 
org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; +import org.opensearch.sql.opensearch.response.OpenSearchResponse; + +@ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class ContinuePageRequestTest { + + @Mock + private Function searchAction; + + @Mock + private Function scrollAction; + + @Mock + private Consumer cleanAction; + + @Mock + private SearchResponse searchResponse; + + @Mock + private SearchHits searchHits; + + @Mock + private SearchHit searchHit; + + @Mock + private OpenSearchExprValueFactory factory; + + private final String scroll = "scroll"; + private final String nextScroll = "nextScroll"; + + private final ContinuePageRequest request = new ContinuePageRequest( + scroll, TimeValue.timeValueMinutes(1), factory); + + @Test + public void search_with_non_empty_response() { + when(scrollAction.apply(any())).thenReturn(searchResponse); + when(searchResponse.getHits()).thenReturn(searchHits); + when(searchHits.getHits()).thenReturn(new SearchHit[] {searchHit}); + when(searchResponse.getScrollId()).thenReturn(nextScroll); + + OpenSearchResponse searchResponse = request.search(searchAction, scrollAction); + assertAll( + () -> assertFalse(searchResponse.isEmpty()), + () -> assertEquals(nextScroll, request.toCursor()), + () -> verify(scrollAction, times(1)).apply(any()), + () -> verify(searchAction, never()).apply(any()) + ); + } + + @Test + // Empty response means scroll search is done and no cursor/scroll should be set + public void search_with_empty_response() { + when(scrollAction.apply(any())).thenReturn(searchResponse); + when(searchResponse.getHits()).thenReturn(searchHits); + when(searchHits.getHits()).thenReturn(null); + lenient().when(searchResponse.getScrollId()).thenReturn(nextScroll); + + OpenSearchResponse searchResponse = request.search(searchAction, scrollAction); + assertAll( + () -> assertTrue(searchResponse.isEmpty()), + () -> assertNull(request.toCursor()), + () -> verify(scrollAction, times(1)).apply(any()), + () -> verify(searchAction, never()).apply(any()) + ); + } + + @Test + @SneakyThrows + public void clean() { + request.clean(cleanAction); + verify(cleanAction, never()).accept(any()); + // Enforce cleaning by setting a private field. 
+ FieldUtils.writeField(request, "scrollFinished", true, true); + request.clean(cleanAction); + verify(cleanAction, times(1)).accept(any()); + } + + @Test + // Added for coverage only + public void getters() { + factory = mock(); + assertAll( + () -> assertThrows(Throwable.class, request::getSourceBuilder), + () -> assertSame(factory, new ContinuePageRequest("", null, factory).getExprValueFactory()) + ); + } +} diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/InitialPageRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/InitialPageRequestBuilderTest.java new file mode 100644 index 0000000000..9d4c0b8dbe --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/InitialPageRequestBuilderTest.java @@ -0,0 +1,117 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.request; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; +import static org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder.DEFAULT_QUERY_TIMEOUT; + +import java.util.Map; +import java.util.Set; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.sql.common.setting.Settings; +import org.opensearch.sql.data.type.ExprType; +import org.opensearch.sql.expression.DSL; +import org.opensearch.sql.expression.ReferenceExpression; +import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +@ExtendWith(MockitoExtension.class) +public class InitialPageRequestBuilderTest { + + @Mock + private OpenSearchExprValueFactory exprValueFactory; + + @Mock + private Settings settings; + + private final int pageSize = 42; + + private final OpenSearchRequest.IndexName indexName = new OpenSearchRequest.IndexName("test"); + + private InitialPageRequestBuilder requestBuilder; + + @BeforeEach + void setup() { + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); + requestBuilder = new InitialPageRequestBuilder( + indexName, pageSize, settings, exprValueFactory); + } + + @Test + public void build() { + assertEquals( + new OpenSearchScrollRequest(indexName, TimeValue.timeValueMinutes(1), + new SearchSourceBuilder() + .from(0) + .size(pageSize) + .timeout(DEFAULT_QUERY_TIMEOUT), + exprValueFactory), + requestBuilder.build() + ); + } + + @Test + public void pushDown_not_supported() { + assertAll( + () -> assertThrows(UnsupportedOperationException.class, + () -> requestBuilder.pushDownFilter(mock())), + () -> assertThrows(UnsupportedOperationException.class, + () -> requestBuilder.pushDownAggregation(mock())), + () -> assertThrows(UnsupportedOperationException.class, + () 
-> requestBuilder.pushDownSort(mock())), + () -> assertThrows(UnsupportedOperationException.class, + () -> requestBuilder.pushDownLimit(1, 2)), + () -> assertThrows(UnsupportedOperationException.class, + () -> requestBuilder.pushDownHighlight("", Map.of())) + ); + } + + @Test + public void pushTypeMapping() { + Map<String, OpenSearchDataType> typeMapping = Map.of("intA", OpenSearchDataType.of(INTEGER)); + requestBuilder.pushTypeMapping(typeMapping); + + verify(exprValueFactory).extendTypeMapping(typeMapping); + } + + @Test + public void pushDownProject() { + Set<ReferenceExpression> references = Set.of(DSL.ref("intA", INTEGER)); + requestBuilder.pushDownProjects(references); + + assertEquals( + new OpenSearchScrollRequest(indexName, TimeValue.timeValueMinutes(1), + new SearchSourceBuilder() + .from(0) + .size(pageSize) + .timeout(DEFAULT_QUERY_TIMEOUT) + .fetchSource(new String[]{"intA"}, new String[0]), + exprValueFactory), + requestBuilder.build() + ); + } + + @Test + public void getIndexName() { + assertEquals(indexName, requestBuilder.getIndexName()); + } +} diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java index 1ba26e33dc..c6a9a06a70 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchQueryRequestTest.java @@ -14,6 +14,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder.DEFAULT_QUERY_TIMEOUT; import java.util.function.Consumer; import java.util.function.Function; @@ -85,7 +86,7 @@ void searchRequest() { new SearchRequest() .indices("test") .source(new SearchSourceBuilder() - .timeout(OpenSearchQueryRequest.DEFAULT_QUERY_TIMEOUT) + .timeout(DEFAULT_QUERY_TIMEOUT) .from(0) .size(200) .query(QueryBuilders.termQuery("name", "John"))), diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java index 980d68ed80..49283e61b9 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestBuilderTest.java @@ -19,6 +19,8 @@ import java.util.Set; import org.apache.commons.lang3.tuple.Pair; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -44,6 +46,7 @@ import org.opensearch.sql.opensearch.response.agg.SingleValueParser; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class OpenSearchRequestBuilderTest { private static final TimeValue DEFAULT_QUERY_TIMEOUT = TimeValue.timeValueMinutes(1L); @@ -62,13 +65,15 @@ public class OpenSearchRequestBuilderTest { @BeforeEach void setup() { when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); requestBuilder = new OpenSearchRequestBuilder( "test", MAX_RESULT_WINDOW, settings,
exprValueFactory); } @Test - void buildQueryRequest() { + void build_query_request() { Integer limit = 200; Integer offset = 0; requestBuilder.pushDownLimit(limit, offset); @@ -85,14 +90,14 @@ void buildQueryRequest() { } @Test - void buildScrollRequestWithCorrectSize() { + void build_scroll_request_with_correct_size() { Integer limit = 800; Integer offset = 10; requestBuilder.pushDownLimit(limit, offset); assertEquals( new OpenSearchScrollRequest( - new OpenSearchRequest.IndexName("test"), + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), new SearchSourceBuilder() .from(offset) .size(MAX_RESULT_WINDOW - offset) @@ -102,9 +107,9 @@ void buildScrollRequestWithCorrectSize() { } @Test - void testPushDownQuery() { + void test_push_down_query() { QueryBuilder query = QueryBuilders.termQuery("intA", 1); - requestBuilder.pushDown(query); + requestBuilder.pushDownFilter(query); assertEquals( new SearchSourceBuilder() @@ -118,7 +123,7 @@ void testPushDownQuery() { } @Test - void testPushDownAggregation() { + void test_push_down_aggregation() { AggregationBuilder aggBuilder = AggregationBuilders.composite( "composite_buckets", Collections.singletonList(new TermsValuesSourceBuilder("longA"))); @@ -139,9 +144,9 @@ void testPushDownAggregation() { } @Test - void testPushDownQueryAndSort() { + void test_push_down_query_and_sort() { QueryBuilder query = QueryBuilders.termQuery("intA", 1); - requestBuilder.pushDown(query); + requestBuilder.pushDownFilter(query); FieldSortBuilder sortBuilder = SortBuilders.fieldSort("intA"); requestBuilder.pushDownSort(List.of(sortBuilder)); @@ -157,7 +162,7 @@ void testPushDownQueryAndSort() { } @Test - void testPushDownSort() { + void test_push_down_sort() { FieldSortBuilder sortBuilder = SortBuilders.fieldSort("intA"); requestBuilder.pushDownSort(List.of(sortBuilder)); @@ -171,7 +176,7 @@ void testPushDownSort() { } @Test - void testPushDownNonFieldSort() { + void test_push_down_non_field_sort() { ScoreSortBuilder sortBuilder = SortBuilders.scoreSort(); requestBuilder.pushDownSort(List.of(sortBuilder)); @@ -185,7 +190,7 @@ void testPushDownNonFieldSort() { } @Test - void testPushDownMultipleSort() { + void test_push_down_multiple_sort() { requestBuilder.pushDownSort(List.of( SortBuilders.fieldSort("intA"), SortBuilders.fieldSort("intB"))); @@ -201,7 +206,7 @@ void testPushDownMultipleSort() { } @Test - void testPushDownProject() { + void test_push_down_project() { Set references = Set.of(DSL.ref("intA", INTEGER)); requestBuilder.pushDownProjects(references); @@ -215,7 +220,7 @@ void testPushDownProject() { } @Test - void testPushTypeMapping() { + void test_push_type_mapping() { Map typeMapping = Map.of("intA", OpenSearchDataType.of(INTEGER)); requestBuilder.pushTypeMapping(typeMapping); diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestTest.java new file mode 100644 index 0000000000..d0a274ce2a --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchRequestTest.java @@ -0,0 +1,23 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + + +package org.opensearch.sql.opensearch.request; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.withSettings; + +import org.junit.jupiter.api.Test; + +public class 
OpenSearchRequestTest { + + @Test + void toCursor() { + var request = mock(OpenSearchRequest.class, withSettings().defaultAnswer(CALLS_REAL_METHODS)); + assertEquals("", request.toCursor()); + } +} diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java index 0fc9c92810..3ad6ad226d 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/OpenSearchScrollRequestTest.java @@ -8,26 +8,40 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.lucene.search.TotalHits; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchScrollRequest; +import org.opensearch.common.unit.TimeValue; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchScrollRequestTest { @Mock private OpenSearchExprValueFactory factory; - private final OpenSearchScrollRequest request = - new OpenSearchScrollRequest("test", factory); + private final OpenSearchScrollRequest request = new OpenSearchScrollRequest( + new OpenSearchRequest.IndexName("test"), TimeValue.timeValueMinutes(1), + new SearchSourceBuilder(), factory); @Test void searchRequest() { @@ -36,17 +50,20 @@ void searchRequest() { assertEquals( new SearchRequest() .indices("test") - .scroll(OpenSearchScrollRequest.DEFAULT_SCROLL_TIMEOUT) + .scroll(TimeValue.timeValueMinutes(1)) .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "John"))), request.searchRequest()); } @Test void isScrollStarted() { - assertFalse(request.isScrollStarted()); + assertFalse(request.isScroll()); request.setScrollId("scroll123"); - assertTrue(request.isScrollStarted()); + assertTrue(request.isScroll()); + + request.reset(); + assertFalse(request.isScroll()); } @Test @@ -54,8 +71,64 @@ void scrollRequest() { request.setScrollId("scroll123"); assertEquals( new SearchScrollRequest() - .scroll(OpenSearchScrollRequest.DEFAULT_SCROLL_TIMEOUT) + .scroll(TimeValue.timeValueMinutes(1)) .scrollId("scroll123"), request.scrollRequest()); } + + @Test + void toCursor() { + request.setScrollId("scroll123"); + assertEquals("scroll123", request.toCursor()); + + request.reset(); + assertNull(request.toCursor()); + } + + @Test + void clean_on_empty_response() { + // This could happen on sequential search calls + SearchResponse searchResponse = mock(); + 
when(searchResponse.getScrollId()).thenReturn("scroll1", "scroll2"); + when(searchResponse.getHits()).thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F), + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + + request.search((x) -> searchResponse, (x) -> searchResponse); + assertEquals("scroll1", request.getScrollId()); + request.search((x) -> searchResponse, (x) -> searchResponse); + assertEquals("scroll1", request.getScrollId()); + + AtomicBoolean cleanCalled = new AtomicBoolean(false); + request.clean((s) -> cleanCalled.set(true)); + + assertNull(request.getScrollId()); + assertTrue(cleanCalled.get()); + } + + @Test + void no_clean_on_non_empty_response() { + SearchResponse searchResponse = mock(); + when(searchResponse.getScrollId()).thenReturn("scroll"); + when(searchResponse.getHits()).thenReturn( + new SearchHits(new SearchHit[1], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F)); + + request.search((x) -> searchResponse, (x) -> searchResponse); + assertEquals("scroll", request.getScrollId()); + + request.clean((s) -> fail()); + assertNull(request.getScrollId()); + } + + @Test + void no_clean_if_no_scroll_in_response() { + SearchResponse searchResponse = mock(); + when(searchResponse.getHits()).thenReturn( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1F)); + + request.search((x) -> searchResponse, (x) -> searchResponse); + assertNull(request.getScrollId()); + + request.clean((s) -> fail()); + } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/request/PushDownRequestBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/PushDownRequestBuilderTest.java new file mode 100644 index 0000000000..8112de197a --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/request/PushDownRequestBuilderTest.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + + +package org.opensearch.sql.opensearch.request; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.withSettings; + +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; + +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class PushDownRequestBuilderTest { + + @Test + public void throw_unsupported2() { + var builder = mock(PushDownRequestBuilder.class, + withSettings().defaultAnswer(CALLS_REAL_METHODS)); + + assertAll( + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushDownFilter(null)), + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushDownAggregation(null)), + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushDownSort(null)), + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushDownLimit(null, null)), + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushDownHighlight(null, null)), + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushDownProjects(null)), + () -> assertThrows(UnsupportedOperationException.class, () -> + builder.pushTypeMapping(null)) + ); + } +} diff --git 
a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java index 0a60503415..2d1d6145f3 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/response/OpenSearchResponseTest.java @@ -74,20 +74,29 @@ void isEmpty() { new TotalHits(2L, TotalHits.Relation.EQUAL_TO), 1.0F)); - assertFalse(new OpenSearchResponse(searchResponse, factory).isEmpty()); + var response = new OpenSearchResponse(searchResponse, factory); + assertFalse(response.isEmpty()); + assertEquals(2L, response.getTotalHits()); when(searchResponse.getHits()).thenReturn(SearchHits.empty()); when(searchResponse.getAggregations()).thenReturn(null); - assertTrue(new OpenSearchResponse(searchResponse, factory).isEmpty()); + + response = new OpenSearchResponse(searchResponse, factory); + assertTrue(response.isEmpty()); + assertEquals(0L, response.getTotalHits()); when(searchResponse.getHits()) .thenReturn(new SearchHits(null, new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0)); - OpenSearchResponse response3 = new OpenSearchResponse(searchResponse, factory); - assertTrue(response3.isEmpty()); + response = new OpenSearchResponse(searchResponse, factory); + assertTrue(response.isEmpty()); + assertEquals(0L, response.getTotalHits()); when(searchResponse.getHits()).thenReturn(SearchHits.empty()); when(searchResponse.getAggregations()).thenReturn(new Aggregations(emptyList())); - assertFalse(new OpenSearchResponse(searchResponse, factory).isEmpty()); + + response = new OpenSearchResponse(searchResponse, factory); + assertFalse(response.isEmpty()); + assertEquals(0L, response.getTotalHits()); } @Test @@ -104,7 +113,8 @@ void iterator() { when(factory.construct(any())).thenReturn(exprTupleValue1).thenReturn(exprTupleValue2); int i = 0; - for (ExprValue hit : new OpenSearchResponse(searchResponse, factory)) { + var response = new OpenSearchResponse(searchResponse, factory); + for (ExprValue hit : response) { if (i == 0) { assertEquals(exprTupleValue1, hit); } else if (i == 1) { @@ -114,6 +124,7 @@ void iterator() { } i++; } + assertEquals(2L, response.getTotalHits()); } @Test diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java index 8d4dad48a9..7181bd5e56 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexTest.java @@ -14,6 +14,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.opensearch.sql.data.type.ExprCoreType.DOUBLE; import static org.opensearch.sql.data.type.ExprCoreType.INTEGER; @@ -41,6 +42,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.common.unit.TimeValue; import org.opensearch.sql.ast.tree.Sort; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.data.model.ExprBooleanValue; @@ -56,6 +58,12 @@ import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; import 
org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.mapping.IndexMapping; +import org.opensearch.sql.opensearch.request.InitialPageRequestBuilder; +import org.opensearch.sql.opensearch.request.OpenSearchRequest; +import org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder; +import org.opensearch.sql.opensearch.request.PagedRequestBuilder; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScan; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchPagedIndexScan; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.logical.LogicalPlanDSL; import org.opensearch.sql.planner.physical.PhysicalPlanDSL; @@ -186,30 +194,54 @@ void checkCacheUsedForFieldMappings() { @Test void implementRelationOperatorOnly() { when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); when(client.getIndexMaxResultWindows("test")).thenReturn(Map.of("test", 10000)); LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); - assertEquals( - new OpenSearchIndexScan(client, settings, indexName, maxResultWindow, exprValueFactory), - index.implement(plan)); + OpenSearchRequestBuilder + builder = new OpenSearchRequestBuilder(indexName, maxResultWindow, + settings, exprValueFactory); + assertEquals(new OpenSearchIndexScan(client, builder), index.implement(plan)); + } + + @Test + void implementPagedRelationOperatorOnly() { + when(client.getIndexMaxResultWindows("test")).thenReturn(Map.of("test", 10000)); + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); + + LogicalPlan plan = index.createPagedScanBuilder(42); + Integer maxResultWindow = index.getMaxResultWindow(); + PagedRequestBuilder builder = new InitialPageRequestBuilder( + new OpenSearchRequest.IndexName(indexName), + maxResultWindow, mock(), exprValueFactory); + assertEquals(new OpenSearchPagedIndexScan(client, builder), index.implement(plan)); } @Test void implementRelationOperatorWithOptimization() { when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); when(client.getIndexMaxResultWindows("test")).thenReturn(Map.of("test", 10000)); LogicalPlan plan = index.createScanBuilder(); Integer maxResultWindow = index.getMaxResultWindow(); + OpenSearchRequestBuilder + builder = new OpenSearchRequestBuilder(indexName, maxResultWindow, + settings, exprValueFactory); assertEquals( - new OpenSearchIndexScan(client, settings, indexName, maxResultWindow, exprValueFactory), + new OpenSearchIndexScan(client, builder), index.implement(index.optimize(plan))); } @Test void implementOtherLogicalOperators() { when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); when(client.getIndexMaxResultWindows("test")).thenReturn(Map.of("test", 10000)); NamedExpression include = named("age", ref("age", INTEGER)); @@ -251,8 +283,10 @@ void implementOtherLogicalOperators() { PhysicalPlanDSL.eval( PhysicalPlanDSL.remove( PhysicalPlanDSL.rename( - new OpenSearchIndexScan(client, settings, indexName, - maxResultWindow, exprValueFactory), + new OpenSearchIndexScan(client, 
+ new OpenSearchRequestBuilder( + indexName, maxResultWindow, + settings, exprValueFactory)), mappings), exclude), newEvalField), diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java index ab87f4531c..6a8727e0fb 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchStorageEngineTest.java @@ -6,11 +6,18 @@ package org.opensearch.sql.opensearch.storage; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import static org.opensearch.sql.analysis.DataSourceSchemaIdentifierNameResolver.DEFAULT_DATASOURCE_NAME; import static org.opensearch.sql.utils.SystemIndexUtils.TABLE_INFO; +import java.util.Map; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -18,6 +25,8 @@ import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.common.setting.Settings; import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.response.OpenSearchResponse; +import org.opensearch.sql.opensearch.storage.scan.OpenSearchPagedIndexScan; import org.opensearch.sql.opensearch.storage.system.OpenSearchSystemIndex; import org.opensearch.sql.storage.Table; @@ -35,7 +44,10 @@ public void getTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), "test"); - assertNotNull(table); + assertAll( + () -> assertNotNull(table), + () -> assertTrue(table instanceof OpenSearchIndex) + ); } @Test @@ -43,7 +55,26 @@ public void getSystemTable() { OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); Table table = engine.getTable(new DataSourceSchemaName(DEFAULT_DATASOURCE_NAME, "default"), TABLE_INFO); - assertNotNull(table); - assertTrue(table instanceof OpenSearchSystemIndex); + assertAll( + () -> assertNotNull(table), + () -> assertTrue(table instanceof OpenSearchSystemIndex) + ); + } + + @Test + public void getTableScan() { + when(client.getIndexMappings(anyString())).thenReturn(Map.of()); + OpenSearchResponse response = mock(); + when(response.isEmpty()).thenReturn(true); + when(client.search(any())).thenReturn(response); + OpenSearchStorageEngine engine = new OpenSearchStorageEngine(client, settings); + var scan = engine.getTableScan("test", "test"); + assertAll( + () -> assertTrue(scan instanceof OpenSearchPagedIndexScan), + () -> { + scan.open(); + assertFalse(scan.hasNext()); + } + ); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java index b90ca8836d..5c125ebc65 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java +++ 
b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanOptimizationTest.java @@ -66,7 +66,6 @@ import org.opensearch.sql.opensearch.response.agg.CompositeAggregationParser; import org.opensearch.sql.opensearch.response.agg.OpenSearchAggregationResponseParser; import org.opensearch.sql.opensearch.response.agg.SingleValueParser; -import org.opensearch.sql.opensearch.storage.OpenSearchIndexScan; import org.opensearch.sql.opensearch.storage.script.aggregation.AggregationQueryBuilder; import org.opensearch.sql.planner.logical.LogicalPlan; import org.opensearch.sql.planner.optimizer.LogicalPlanOptimizer; @@ -527,7 +526,7 @@ private void assertEqualsAfterOptimization(LogicalPlan expected, LogicalPlan act } private Runnable withFilterPushedDown(QueryBuilder filteringCondition) { - return () -> verify(requestBuilder, times(1)).pushDown(filteringCondition); + return () -> verify(requestBuilder, times(1)).pushDownFilter(filteringCondition); } private Runnable withAggregationPushedDown( diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java similarity index 59% rename from opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScanTest.java rename to opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java index 8aec6a7d13..c133897ca2 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/OpenSearchIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchIndexScanTest.java @@ -4,12 +4,14 @@ */ -package org.opensearch.sql.opensearch.storage; +package org.opensearch.sql.opensearch.storage.scan; +import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -21,6 +23,8 @@ import java.util.HashMap; import java.util.Map; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -28,6 +32,7 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.stubbing.Answer; import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.unit.TimeValue; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; @@ -43,9 +48,11 @@ import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; import org.opensearch.sql.opensearch.request.OpenSearchQueryRequest; import org.opensearch.sql.opensearch.request.OpenSearchRequest; +import org.opensearch.sql.opensearch.request.OpenSearchRequestBuilder; import org.opensearch.sql.opensearch.response.OpenSearchResponse; @ExtendWith(MockitoExtension.class) +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) class OpenSearchIndexScanTest { @Mock @@ -61,122 +68,168 @@ class OpenSearchIndexScanTest { @BeforeEach void setup() { 
when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(200); + when(settings.getSettingValue(Settings.Key.SQL_CURSOR_KEEP_ALIVE)) + .thenReturn(TimeValue.timeValueMinutes(1)); } @Test - void queryEmptyResult() { - mockResponse(); - try (OpenSearchIndexScan indexScan = - new OpenSearchIndexScan(client, settings, "test", 3, exprValueFactory)) { + void query_empty_result() { + mockResponse(client); + try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder("test", 3, settings, exprValueFactory))) { indexScan.open(); - assertFalse(indexScan.hasNext()); + assertAll( + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(0, indexScan.getTotalHits()) + ); } verify(client).cleanup(any()); } @Test - void queryAllResultsWithQuery() { - mockResponse(new ExprValue[]{ + void query_all_results_with_query() { + mockResponse(client, new ExprValue[]{ employee(1, "John", "IT"), employee(2, "Smith", "HR"), employee(3, "Allen", "IT")}); - try (OpenSearchIndexScan indexScan = - new OpenSearchIndexScan(client, settings, "employees", 10, exprValueFactory)) { + try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder("employees", 10, settings, exprValueFactory))) { indexScan.open(); - assertTrue(indexScan.hasNext()); - assertEquals(employee(1, "John", "IT"), indexScan.next()); + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(2, "Smith", "HR"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(3, "Allen", "IT"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - assertFalse(indexScan.hasNext()); + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(3, indexScan.getTotalHits()) + ); } verify(client).cleanup(any()); } @Test - void queryAllResultsWithScroll() { - mockResponse( + void query_all_results_with_scroll() { + mockResponse(client, new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, new ExprValue[]{employee(3, "Allen", "IT")}); + //when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(2); - try (OpenSearchIndexScan indexScan = - new OpenSearchIndexScan(client, settings, "employees", 2, exprValueFactory)) { + try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder("employees", 10, settings, exprValueFactory))) { indexScan.open(); - assertTrue(indexScan.hasNext()); - assertEquals(employee(1, "John", "IT"), indexScan.next()); + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(2, "Smith", "HR"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(3, "Allen", "IT"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - assertFalse(indexScan.hasNext()); + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(3, indexScan.getTotalHits()) + ); } verify(client).cleanup(any()); } @Test - void querySomeResultsWithQuery() { - 
mockResponse(new ExprValue[]{ + void query_some_results_with_query() { + mockResponse(client, new ExprValue[]{ employee(1, "John", "IT"), employee(2, "Smith", "HR"), employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); - try (OpenSearchIndexScan indexScan = - new OpenSearchIndexScan(client, settings, "employees", 10, exprValueFactory)) { + try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder("employees", 10, settings, exprValueFactory))) { indexScan.getRequestBuilder().pushDownLimit(3, 0); indexScan.open(); - assertTrue(indexScan.hasNext()); - assertEquals(employee(1, "John", "IT"), indexScan.next()); + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(2, "Smith", "HR"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(3, "Allen", "IT"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), - assertFalse(indexScan.hasNext()); + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(3, indexScan.getTotalHits()) + ); } verify(client).cleanup(any()); } @Test - void querySomeResultsWithScroll() { - mockResponse( + void query_some_results_with_scroll() { + mockResponse(client, new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, new ExprValue[]{employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); try (OpenSearchIndexScan indexScan = - new OpenSearchIndexScan(client, settings, "employees", 2, exprValueFactory)) { + new OpenSearchIndexScan(client, new OpenSearchRequestBuilder("employees", 2, settings, + exprValueFactory))) { indexScan.getRequestBuilder().pushDownLimit(3, 0); indexScan.open(); - assertTrue(indexScan.hasNext()); - assertEquals(employee(1, "John", "IT"), indexScan.next()); + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), + + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), + + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), + + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(3, indexScan.getTotalHits()) + ); + } + verify(client).cleanup(any()); + } + + @Test + void query_results_limited_by_query_size() { + mockResponse(client, new ExprValue[]{ + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT"), + employee(4, "Bob", "HR")}); + when(settings.getSettingValue(Settings.Key.QUERY_SIZE_LIMIT)).thenReturn(2); + + try (OpenSearchIndexScan indexScan = new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder("employees", 10, settings, exprValueFactory))) { + indexScan.open(); - assertTrue(indexScan.hasNext()); - assertEquals(employee(2, "Smith", "HR"), indexScan.next()); + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), - assertTrue(indexScan.hasNext()); - assertEquals(employee(3, "Allen", "IT"), indexScan.next()); + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), - assertFalse(indexScan.hasNext()); + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(2, indexScan.getTotalHits()) + ); } 
verify(client).cleanup(any()); } @Test - void pushDownFilters() { + void push_down_filters() { assertThat() .pushDown(QueryBuilders.termQuery("name", "John")) .shouldQuery(QueryBuilders.termQuery("name", "John")) @@ -194,7 +247,7 @@ void pushDownFilters() { } @Test - void pushDownHighlight() { + void push_down_highlight() { Map args = new HashMap<>(); assertThat() .pushDown(QueryBuilders.termQuery("name", "John")) @@ -205,7 +258,7 @@ void pushDownHighlight() { } @Test - void pushDownHighlightWithArguments() { + void push_down_highlight_with_arguments() { Map args = new HashMap<>(); args.put("pre_tags", new Literal("", DataType.STRING)); args.put("post_tags", new Literal("", DataType.STRING)); @@ -220,13 +273,14 @@ void pushDownHighlightWithArguments() { } @Test - void pushDownHighlightWithRepeatingFields() { - mockResponse( + void push_down_highlight_with_repeating_fields() { + mockResponse(client, new ExprValue[]{employee(1, "John", "IT"), employee(2, "Smith", "HR")}, new ExprValue[]{employee(3, "Allen", "IT"), employee(4, "Bob", "HR")}); try (OpenSearchIndexScan indexScan = - new OpenSearchIndexScan(client, settings, "test", 2, exprValueFactory)) { + new OpenSearchIndexScan(client, new OpenSearchRequestBuilder("test", 2, settings, + exprValueFactory))) { indexScan.getRequestBuilder().pushDownLimit(3, 0); indexScan.open(); Map args = new HashMap<>(); @@ -252,14 +306,16 @@ public PushDownAssertion(OpenSearchClient client, OpenSearchExprValueFactory valueFactory, Settings settings) { this.client = client; - this.indexScan = new OpenSearchIndexScan(client, settings, "test", 10000, valueFactory); + this.indexScan = new OpenSearchIndexScan(client, + new OpenSearchRequestBuilder("test", 10000, + settings, valueFactory)); this.response = mock(OpenSearchResponse.class); this.factory = valueFactory; when(response.isEmpty()).thenReturn(true); } PushDownAssertion pushDown(QueryBuilder query) { - indexScan.getRequestBuilder().pushDown(query); + indexScan.getRequestBuilder().pushDownFilter(query); return this; } @@ -290,7 +346,7 @@ PushDownAssertion shouldQuery(QueryBuilder expected) { } } - private void mockResponse(ExprValue[]... searchHitBatches) { + public static void mockResponse(OpenSearchClient client, ExprValue[]... 
searchHitBatches) { when(client.search(any())) .thenAnswer( new Answer() { @@ -304,6 +360,9 @@ public OpenSearchResponse answer(InvocationOnMock invocation) { when(response.isEmpty()).thenReturn(false); ExprValue[] searchHit = searchHitBatches[batchNum]; when(response.iterator()).thenReturn(Arrays.asList(searchHit).iterator()); + // used in OpenSearchPagedIndexScanTest + lenient().when(response.getTotalHits()) + .thenReturn((long) searchHitBatches[batchNum].length); } else { when(response.isEmpty()).thenReturn(true); } @@ -314,14 +373,14 @@ public OpenSearchResponse answer(InvocationOnMock invocation) { }); } - protected ExprValue employee(int docId, String name, String department) { + public static ExprValue employee(int docId, String name, String department) { SearchHit hit = new SearchHit(docId); hit.sourceRef( new BytesArray("{\"name\":\"" + name + "\",\"department\":\"" + department + "\"}")); return tupleValue(hit); } - private ExprValue tupleValue(SearchHit hit) { + private static ExprValue tupleValue(SearchHit hit) { return ExprValueUtils.tupleValue(hit.getSourceAsMap()); } } diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScanTest.java new file mode 100644 index 0000000000..38888115c9 --- /dev/null +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/scan/OpenSearchPagedIndexScanTest.java @@ -0,0 +1,164 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.sql.opensearch.storage.scan; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.withSettings; +import static org.opensearch.sql.data.type.ExprCoreType.STRING; +import static org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScanTest.employee; +import static org.opensearch.sql.opensearch.storage.scan.OpenSearchIndexScanTest.mockResponse; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.sql.data.model.ExprValue; +import org.opensearch.sql.opensearch.client.OpenSearchClient; +import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; +import org.opensearch.sql.opensearch.data.value.OpenSearchExprValueFactory; +import org.opensearch.sql.opensearch.request.ContinuePageRequestBuilder; +import org.opensearch.sql.opensearch.request.InitialPageRequestBuilder; +import org.opensearch.sql.opensearch.request.OpenSearchRequest; +import org.opensearch.sql.opensearch.request.PagedRequestBuilder; +import org.opensearch.sql.opensearch.response.OpenSearchResponse; + +@ExtendWith(MockitoExtension.class) 
+@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) +public class OpenSearchPagedIndexScanTest { + @Mock + private OpenSearchClient client; + + private final OpenSearchExprValueFactory exprValueFactory = new OpenSearchExprValueFactory( + ImmutableMap.of( + "name", OpenSearchDataType.of(STRING), + "department", OpenSearchDataType.of(STRING))); + + @Test + void query_empty_result() { + mockResponse(client); + InitialPageRequestBuilder builder = new InitialPageRequestBuilder( + new OpenSearchRequest.IndexName("test"), 3, mock(), exprValueFactory); + try (OpenSearchPagedIndexScan indexScan = new OpenSearchPagedIndexScan(client, builder)) { + indexScan.open(); + assertFalse(indexScan.hasNext()); + } + verify(client).cleanup(any()); + } + + @Test + void query_all_results_initial_scroll_request() { + mockResponse(client, new ExprValue[]{ + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT")}); + + PagedRequestBuilder builder = new InitialPageRequestBuilder( + new OpenSearchRequest.IndexName("test"), 3, mock(), exprValueFactory); + try (OpenSearchPagedIndexScan indexScan = new OpenSearchPagedIndexScan(client, builder)) { + indexScan.open(); + + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), + + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), + + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), + + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(3, indexScan.getTotalHits()) + ); + } + verify(client).cleanup(any()); + + builder = new ContinuePageRequestBuilder( + new OpenSearchRequest.IndexName("test"), "scroll", mock(), exprValueFactory); + try (OpenSearchPagedIndexScan indexScan = new OpenSearchPagedIndexScan(client, builder)) { + indexScan.open(); + + assertFalse(indexScan.hasNext()); + } + verify(client, times(2)).cleanup(any()); + } + + @Test + void query_all_results_continuation_scroll_request() { + mockResponse(client, new ExprValue[]{ + employee(1, "John", "IT"), + employee(2, "Smith", "HR"), + employee(3, "Allen", "IT")}); + + ContinuePageRequestBuilder builder = new ContinuePageRequestBuilder( + new OpenSearchRequest.IndexName("test"), "scroll", mock(), exprValueFactory); + try (OpenSearchPagedIndexScan indexScan = new OpenSearchPagedIndexScan(client, builder)) { + indexScan.open(); + + assertAll( + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(1, "John", "IT"), indexScan.next()), + + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(2, "Smith", "HR"), indexScan.next()), + + () -> assertTrue(indexScan.hasNext()), + () -> assertEquals(employee(3, "Allen", "IT"), indexScan.next()), + + () -> assertFalse(indexScan.hasNext()), + () -> assertEquals(3, indexScan.getTotalHits()) + ); + } + verify(client).cleanup(any()); + + builder = new ContinuePageRequestBuilder( + new OpenSearchRequest.IndexName("test"), "scroll", mock(), exprValueFactory); + try (OpenSearchPagedIndexScan indexScan = new OpenSearchPagedIndexScan(client, builder)) { + indexScan.open(); + + assertFalse(indexScan.hasNext()); + } + verify(client, times(2)).cleanup(any()); + } + + @Test + void explain_not_implemented() { + assertThrows(Throwable.class, () -> mock(OpenSearchPagedIndexScan.class, + withSettings().defaultAnswer(CALLS_REAL_METHODS)).explain()); + } + + @Test + void toCursor() { + PagedRequestBuilder builder = mock(); + 
OpenSearchRequest request = mock(); + OpenSearchResponse response = mock(); + when(builder.build()).thenReturn(request); + when(builder.getIndexName()).thenReturn(new OpenSearchRequest.IndexName("index")); + when(client.search(request)).thenReturn(response); + when(response.isEmpty()).thenReturn(true); + when(request.toCursor()).thenReturn("cu-cursor", "", null); + OpenSearchPagedIndexScan indexScan = new OpenSearchPagedIndexScan(client, builder); + indexScan.open(); + assertAll( + () -> assertEquals("(OpenSearchPagedIndexScan,index,cu-cursor)", indexScan.toCursor()), + () -> assertEquals("", indexScan.toCursor()), + () -> assertEquals("", indexScan.toCursor()) + ); + } +} diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java index 3d497c2f5b..a88d81c020 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/ExpressionScriptEngineTest.java @@ -27,8 +27,8 @@ import org.opensearch.script.ScriptEngine; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.storage.script.filter.ExpressionFilterScriptFactory; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java index e771e01bce..474aba1420 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/AggregationQueryBuilderTest.java @@ -51,9 +51,9 @@ import org.opensearch.sql.expression.aggregation.AvgAggregator; import org.opensearch.sql.expression.aggregation.CountAggregator; import org.opensearch.sql.expression.aggregation.NamedAggregator; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java index f93c69de28..eaeacd09ef 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/BucketAggregationBuilderTest.java @@ -46,9 +46,9 @@ import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.NamedExpression; import org.opensearch.sql.expression.parse.ParseExpression; +import 
org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java index 94f152f913..d8e81026b6 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/aggregation/dsl/MetricAggregationBuilderTest.java @@ -43,7 +43,7 @@ import org.opensearch.sql.expression.aggregation.SumAggregator; import org.opensearch.sql.expression.aggregation.TakeAggregator; import org.opensearch.sql.expression.function.FunctionName; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java index 96245909a4..3b7865aa46 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/script/filter/FilterQueryBuilderTest.java @@ -53,9 +53,9 @@ import org.opensearch.sql.expression.FunctionExpression; import org.opensearch.sql.expression.LiteralExpression; import org.opensearch.sql.expression.ReferenceExpression; +import org.opensearch.sql.expression.serialization.ExpressionSerializer; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; -import org.opensearch.sql.opensearch.storage.serialization.ExpressionSerializer; @DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) @ExtendWith(MockitoExtension.class) diff --git a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java index 494f3ff2d0..c04ef25611 100644 --- a/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java +++ b/opensearch/src/test/java/org/opensearch/sql/opensearch/storage/system/OpenSearchSystemIndexScanTest.java @@ -32,6 +32,7 @@ public void queryData() { systemIndexScan.open(); assertTrue(systemIndexScan.hasNext()); assertEquals(stringValue("text"), systemIndexScan.next()); + assertEquals(1, systemIndexScan.getTotalHits()); } @Test diff --git a/plugin/build.gradle b/plugin/build.gradle index 1c5b4366f0..cb9ab64d7b 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -248,6 +248,7 @@ afterEvaluate { testClusters.integTest { plugin(project.tasks.bundlePlugin.archiveFile) + testDistribution = "ARCHIVE" // debug with command, ./gradlew opensearch-sql:run -DdebugJVM. 
--debug-jvm does not work with keystore. if (System.getProperty("debugJVM") != null) { diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java index 3d733233be..1439ed0e25 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java @@ -51,6 +51,7 @@ import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasource.DataSourceServiceImpl; import org.opensearch.sql.datasource.DataSourceUserAuthorizationHelper; +import org.opensearch.sql.expression.serialization.DefaultExpressionSerializer; import org.opensearch.sql.legacy.esdomain.LocalClusterState; import org.opensearch.sql.legacy.executor.AsyncRestExecutor; import org.opensearch.sql.legacy.metrics.Metrics; @@ -61,7 +62,6 @@ import org.opensearch.sql.opensearch.setting.OpenSearchSettings; import org.opensearch.sql.opensearch.storage.OpenSearchDataSourceFactory; import org.opensearch.sql.opensearch.storage.script.ExpressionScriptEngine; -import org.opensearch.sql.opensearch.storage.serialization.DefaultExpressionSerializer; import org.opensearch.sql.plugin.config.OpenSearchPluginModule; import org.opensearch.sql.plugin.datasource.DataSourceSettings; import org.opensearch.sql.plugin.datasource.DataSourceUserAuthorizationHelperImpl; diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java index 5ab4bbaecd..b0c698a0cf 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/config/OpenSearchPluginModule.java @@ -18,6 +18,7 @@ import org.opensearch.sql.executor.QueryManager; import org.opensearch.sql.executor.QueryService; import org.opensearch.sql.executor.execution.QueryPlanFactory; +import org.opensearch.sql.executor.pagination.PaginatedPlanCache; import org.opensearch.sql.expression.function.BuiltinFunctionRepository; import org.opensearch.sql.monitor.ResourceMonitor; import org.opensearch.sql.opensearch.client.OpenSearchClient; @@ -58,8 +59,9 @@ public StorageEngine storageEngine(OpenSearchClient client, Settings settings) { } @Provides - public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector) { - return new OpenSearchExecutionEngine(client, protector); + public ExecutionEngine executionEngine(OpenSearchClient client, ExecutionProtector protector, + PaginatedPlanCache paginatedPlanCache) { + return new OpenSearchExecutionEngine(client, protector, paginatedPlanCache); } @Provides @@ -72,6 +74,11 @@ public ExecutionProtector protector(ResourceMonitor resourceMonitor) { return new OpenSearchExecutionProtector(resourceMonitor); } + @Provides + public PaginatedPlanCache paginatedPlanCache(StorageEngine storageEngine) { + return new PaginatedPlanCache(storageEngine); + } + @Provides @Singleton public QueryManager queryManager(NodeClient nodeClient) { @@ -92,12 +99,16 @@ public SQLService sqlService(QueryManager queryManager, QueryPlanFactory queryPl * {@link QueryPlanFactory}. 
*/ @Provides - public QueryPlanFactory queryPlanFactory( - DataSourceService dataSourceService, ExecutionEngine executionEngine) { + public QueryPlanFactory queryPlanFactory(DataSourceService dataSourceService, + ExecutionEngine executionEngine, + PaginatedPlanCache paginatedPlanCache) { Analyzer analyzer = new Analyzer( new ExpressionAnalyzer(functionRepository), dataSourceService, functionRepository); Planner planner = new Planner(LogicalPlanOptimizer.create()); - return new QueryPlanFactory(new QueryService(analyzer, executionEngine, planner)); + Planner paginationPlanner = new Planner(LogicalPlanOptimizer.paginationCreate()); + QueryService queryService = new QueryService( + analyzer, executionEngine, planner, paginationPlanner); + return new QueryPlanFactory(queryService, paginatedPlanCache); } } diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java index 6825b2ac92..a67e077ecc 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/transport/TransportPPLQueryAction.java @@ -139,7 +139,8 @@ private ResponseListener createListener( @Override public void onResponse(ExecutionEngine.QueryResponse response) { String responseContent = - formatter.format(new QueryResult(response.getSchema(), response.getResults())); + formatter.format(new QueryResult(response.getSchema(), response.getResults(), + response.getCursor(), response.getTotal())); listener.onResponse(new TransportPPLQueryResponse(responseContent)); } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java index e11edc1646..f91ac7222f 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/PPLService.java @@ -90,6 +90,7 @@ private AbstractPlan plan( QueryContext.getRequestId(), anonymizer.anonymizeStatement(statement)); - return queryExecutionFactory.create(statement, queryListener, explainListener); + return queryExecutionFactory.createContinuePaginatedPlan( + statement, queryListener, explainListener); } } diff --git a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java index e4f40e9a11..3b7e5a78dd 100644 --- a/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java +++ b/ppl/src/main/java/org/opensearch/sql/ppl/parser/AstStatementBuilder.java @@ -33,7 +33,7 @@ public class AstStatementBuilder extends OpenSearchPPLParserBaseVisitor { ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList())); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), 0, Cursor.None)); return null; }).when(queryService).execute(any(), any()); @@ -87,7 +93,7 @@ public void onFailure(Exception e) { public void testExecuteCsvFormatShouldPass() { doAnswer(invocation -> { ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList())); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), 0, Cursor.None)); return null; }).when(queryService).execute(any(), any()); @@ -161,7 +167,7 @@ public void onFailure(Exception e) { public void testPrometheusQuery() { doAnswer(invocation -> { ResponseListener listener = 
invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList())); + listener.onResponse(new QueryResponse(schema, Collections.emptyList(), 0, Cursor.None)); return null; }).when(queryService).execute(any(), any()); diff --git a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java index 4760024692..de74e4932f 100644 --- a/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java +++ b/ppl/src/test/java/org/opensearch/sql/ppl/parser/AstStatementBuilderTest.java @@ -39,7 +39,8 @@ public void buildQueryStatement() { "search source=t a=1", new Query( project( - filter(relation("t"), compare("=", field("a"), intLiteral(1))), AllFields.of()))); + filter(relation("t"), compare("=", field("a"), + intLiteral(1))), AllFields.of()), 0)); } @Test @@ -50,7 +51,7 @@ public void buildExplainStatement() { new Query( project( filter(relation("t"), compare("=", field("a"), intLiteral(1))), - AllFields.of())))); + AllFields.of()), 0))); } private void assertEqual(String query, Statement expectedStatement) { diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java index 915a61f361..d06dba7719 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/QueryResult.java @@ -16,6 +16,7 @@ import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.executor.ExecutionEngine; import org.opensearch.sql.executor.ExecutionEngine.Schema.Column; +import org.opensearch.sql.executor.pagination.Cursor; /** * Query response that encapsulates query results and isolate {@link ExprValue} @@ -32,6 +33,16 @@ public class QueryResult implements Iterable { */ private final Collection exprValues; + @Getter + private final Cursor cursor; + + @Getter + private final long total; + + + public QueryResult(ExecutionEngine.Schema schema, Collection exprValues) { + this(schema, exprValues, Cursor.None, exprValues.size()); + } /** * size of results. 
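// QueryResult now carries the cursor and the total row count alongside schema and values. The
// two-argument constructor above keeps existing call sites compiling by defaulting to
// Cursor.None and total = exprValues.size(); paginated responses use the four-argument form.
// A sketch only; `schema` and `rows` stand for any ExecutionEngine.Schema and ExprValue rows.
QueryResult singlePage = new QueryResult(schema, rows);           // cursor = Cursor.None, total = rows.size()
QueryResult pagedResult = new QueryResult(schema, rows,
    new Cursor("test_cursor".getBytes()), 42);                    // explicit cursor and server-side total
// Formatters read getCursor() and getTotal(); Cursor.None is the "no further pages" sentinel.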
diff --git a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java index 943287cb62..b9a2d2fcc6 100644 --- a/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java +++ b/protocol/src/main/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatter.java @@ -15,6 +15,7 @@ import org.opensearch.sql.data.type.ExprType; import org.opensearch.sql.exception.QueryEngineException; import org.opensearch.sql.executor.ExecutionEngine.Schema; +import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.opensearch.response.error.ErrorMessage; import org.opensearch.sql.opensearch.response.error.ErrorMessageFactory; import org.opensearch.sql.protocol.response.QueryResult; @@ -39,9 +40,12 @@ protected Object buildJsonObject(QueryResult response) { json.datarows(fetchDataRows(response)); // Populate other fields - json.total(response.size()) + json.total(response.getTotal()) .size(response.size()) .status(200); + if (!response.getCursor().equals(Cursor.None)) { + json.cursor(response.getCursor().toString()); + } return json.build(); } @@ -95,6 +99,8 @@ public static class JdbcResponse { private final long total; private final long size; private final int status; + + private final String cursor; } @RequiredArgsConstructor diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java index 319965e2d0..470bb205a8 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/QueryResultTest.java @@ -19,6 +19,7 @@ import java.util.Collections; import org.junit.jupiter.api.Test; import org.opensearch.sql.executor.ExecutionEngine; +import org.opensearch.sql.executor.pagination.Cursor; class QueryResultTest { @@ -35,7 +36,7 @@ void size() { tupleValue(ImmutableMap.of("name", "John", "age", 20)), tupleValue(ImmutableMap.of("name", "Allen", "age", 30)), tupleValue(ImmutableMap.of("name", "Smith", "age", 40)) - )); + ), Cursor.None, 0); assertEquals(3, response.size()); } @@ -45,7 +46,7 @@ void columnNameTypes() { schema, Collections.singletonList( tupleValue(ImmutableMap.of("name", "John", "age", 20)) - )); + ), Cursor.None, 0); assertEquals( ImmutableMap.of("name", "string", "age", "integer"), @@ -59,7 +60,8 @@ void columnNameTypesWithAlias() { new ExecutionEngine.Schema.Column("name", "n", STRING))); QueryResult response = new QueryResult( schema, - Collections.singletonList(tupleValue(ImmutableMap.of("n", "John")))); + Collections.singletonList(tupleValue(ImmutableMap.of("n", "John"))), + Cursor.None, 0); assertEquals( ImmutableMap.of("n", "string"), @@ -71,7 +73,7 @@ void columnNameTypesWithAlias() { void columnNameTypesFromEmptyExprValues() { QueryResult response = new QueryResult( schema, - Collections.emptyList()); + Collections.emptyList(), Cursor.None, 0); assertEquals( ImmutableMap.of("name", "string", "age", "integer"), response.columnNameTypes() @@ -100,7 +102,7 @@ void iterate() { Arrays.asList( tupleValue(ImmutableMap.of("name", "John", "age", 20)), tupleValue(ImmutableMap.of("name", "Allen", "age", 30)) - )); + ), Cursor.None, 0); int i = 0; for (Object[] objects : response) { diff --git a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java 
b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java index a6671c66f8..b5cb5984a1 100644 --- a/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java +++ b/protocol/src/test/java/org/opensearch/sql/protocol/response/format/JdbcResponseFormatterTest.java @@ -31,6 +31,7 @@ import org.opensearch.sql.common.antlr.SyntaxCheckException; import org.opensearch.sql.data.model.ExprTupleValue; import org.opensearch.sql.exception.SemanticCheckException; +import org.opensearch.sql.executor.pagination.Cursor; import org.opensearch.sql.opensearch.data.type.OpenSearchDataType; import org.opensearch.sql.opensearch.data.type.OpenSearchTextType; import org.opensearch.sql.protocol.response.QueryResult; @@ -83,6 +84,37 @@ void format_response() { formatter.format(response)); } + @Test + void format_response_with_cursor() { + QueryResult response = new QueryResult( + new Schema(ImmutableList.of( + new Column("name", "name", STRING), + new Column("address", "address", OpenSearchTextType.of()), + new Column("age", "age", INTEGER))), + ImmutableList.of( + tupleValue(ImmutableMap.builder() + .put("name", "John") + .put("address", "Seattle") + .put("age", 20) + .build())), + new Cursor("test_cursor".getBytes()), 42); + + assertJsonEquals( + "{" + + "\"schema\":[" + + "{\"name\":\"name\",\"alias\":\"name\",\"type\":\"keyword\"}," + + "{\"name\":\"address\",\"alias\":\"address\",\"type\":\"text\"}," + + "{\"name\":\"age\",\"alias\":\"age\",\"type\":\"integer\"}" + + "]," + + "\"datarows\":[" + + "[\"John\",\"Seattle\",20]]," + + "\"total\":42," + + "\"size\":1," + + "\"cursor\":\"test_cursor\"," + + "\"status\":200}", + formatter.format(response)); + } + @Test void format_response_with_missing_and_null_value() { QueryResult response = diff --git a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java index 082a3e9581..4ecf9e699b 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/SQLService.java +++ b/sql/src/main/java/org/opensearch/sql/sql/SQLService.java @@ -65,16 +65,24 @@ private AbstractPlan plan( SQLQueryRequest request, Optional> queryListener, Optional> explainListener) { - // 1.Parse query and convert parse tree (CST) to abstract syntax tree (AST) - ParseTree cst = parser.parse(request.getQuery()); - Statement statement = - cst.accept( - new AstStatementBuilder( - new AstBuilder(request.getQuery()), - AstStatementBuilder.StatementBuilderContext.builder() - .isExplain(request.isExplainRequest()) - .build())); + if (request.getCursor().isPresent()) { + // Handle v2 cursor here -- legacy cursor was handled earlier. 
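// (Context for this branch: request.getCursor() is the cursor string a client received in a
// previous JDBC response, so no parsing takes place here; the factory is asked to continue the
// already-created paginated plan, and the forwarded explain listener is expected to be failed
// for cursor requests, as the cannot_explain_cursor_query test below asserts.)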
+ return queryExecutionFactory.createContinuePaginatedPlan(request.getCursor().get(), + request.isExplainRequest(), queryListener.orElse(null), explainListener.orElse(null)); + } else { + // 1.Parse query and convert parse tree (CST) to abstract syntax tree (AST) + ParseTree cst = parser.parse(request.getQuery()); + Statement statement = + cst.accept( + new AstStatementBuilder( + new AstBuilder(request.getQuery()), + AstStatementBuilder.StatementBuilderContext.builder() + .isExplain(request.isExplainRequest()) + .fetchSize(request.getFetchSize()) + .build())); - return queryExecutionFactory.create(statement, queryListener, explainListener); + return queryExecutionFactory.createContinuePaginatedPlan( + statement, queryListener, explainListener); + } } } diff --git a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java index 508f80cee4..7545f4cc19 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java +++ b/sql/src/main/java/org/opensearch/sql/sql/domain/SQLQueryRequest.java @@ -6,13 +6,12 @@ package org.opensearch.sql.sql.domain; -import com.google.common.base.Strings; -import com.google.common.collect.ImmutableSet; import java.util.Collections; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.stream.Stream; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -28,9 +27,9 @@ @EqualsAndHashCode @RequiredArgsConstructor public class SQLQueryRequest { - - private static final Set SUPPORTED_FIELDS = ImmutableSet.of( - "query", "fetch_size", "parameters"); + private static final String QUERY_FIELD_CURSOR = "cursor"; + private static final Set SUPPORTED_FIELDS = Set.of( + "query", "fetch_size", "parameters", QUERY_FIELD_CURSOR); private static final String QUERY_PARAMS_FORMAT = "format"; private static final String QUERY_PARAMS_SANITIZE = "sanitize"; @@ -64,36 +63,50 @@ public class SQLQueryRequest { @Accessors(fluent = true) private boolean sanitize = true; + private String cursor; + /** * Constructor of SQLQueryRequest that passes request params. */ - public SQLQueryRequest( - JSONObject jsonContent, String query, String path, Map params) { + public SQLQueryRequest(JSONObject jsonContent, String query, String path, + Map params, String cursor) { this.jsonContent = jsonContent; this.query = query; this.path = path; this.params = params; this.format = getFormat(params); this.sanitize = shouldSanitize(params); + this.cursor = cursor; } /** * Pre-check if the request can be supported by meeting ALL the following criteria: * 1.Only supported fields present in request body, ex. "filter" and "cursor" are not supported - * 2.No fetch_size or "fetch_size=0". In other word, it's not a cursor request - * 3.Response format is default or can be supported. + * 2.Response format is default or can be supported. * - * @return true if supported. + * @return true if supported. 
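// Worked examples for the predicate defined below, using the five-argument constructor added in
// this change; the path and cursor values are the placeholders the tests use. A request is either
// a normal query without a cursor, or a cursor-only continuation with no query, no body fields
// and at most a "format" parameter.
SQLQueryRequest firstPage = new SQLQueryRequest(
    new JSONObject("{\"query\": \"SELECT 1\", \"fetch_size\": 5}"),
    "SELECT 1", "_plugins/_sql", Map.of("format", "jdbc"), null);      // supported
SQLQueryRequest nextPage = new SQLQueryRequest(
    null, null, "_plugins/_sql", Map.of("format", "jdbc"), "n:12356"); // supported
SQLQueryRequest mixed = new SQLQueryRequest(
    null, "SELECT 1", "_plugins/_sql", Map.of(), "n:12356");           // not supported: cursor plus query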
*/ public boolean isSupported() { - return isOnlySupportedFieldInPayload() - && isFetchSizeZeroIfPresent() - && isSupportedFormat(); + var noCursor = !isCursor(); + var noQuery = query == null; + var noUnsupportedParams = params.isEmpty() + || (params.size() == 1 && params.containsKey(QUERY_PARAMS_FORMAT)); + var noContent = jsonContent == null || jsonContent.isEmpty(); + + return ((!noCursor && noQuery + && noUnsupportedParams && noContent) // if cursor is given, but other things + || (noCursor && !noQuery)) // or if cursor is not given, but query + && isOnlySupportedFieldInPayload() // and request has supported fields only + && isSupportedFormat(); // and request is in supported format + } + + private boolean isCursor() { + return cursor != null && !cursor.isEmpty(); } /** * Check if request is to explain rather than execute the query. - * @return true if it is a explain request + * @return true if it is an explain request */ public boolean isExplainRequest() { return path.endsWith("/_explain"); @@ -113,23 +126,23 @@ public Format format() { } private boolean isOnlySupportedFieldInPayload() { - return SUPPORTED_FIELDS.containsAll(jsonContent.keySet()); + return jsonContent == null || SUPPORTED_FIELDS.containsAll(jsonContent.keySet()); } - private boolean isFetchSizeZeroIfPresent() { - return (jsonContent.optInt("fetch_size") == 0); + public Optional getCursor() { + return Optional.ofNullable(cursor); + } + + public int getFetchSize() { + return jsonContent.optInt("fetch_size"); } private boolean isSupportedFormat() { - return Strings.isNullOrEmpty(format) || "jdbc".equalsIgnoreCase(format) - || "csv".equalsIgnoreCase(format) || "raw".equalsIgnoreCase(format); + return Stream.of("csv", "jdbc", "raw").anyMatch(format::equalsIgnoreCase); } private String getFormat(Map params) { - if (params.containsKey(QUERY_PARAMS_FORMAT)) { - return params.get(QUERY_PARAMS_FORMAT); - } - return "jdbc"; + return params.getOrDefault(QUERY_PARAMS_FORMAT, "jdbc"); } private boolean shouldSanitize(Map params) { diff --git a/sql/src/main/java/org/opensearch/sql/sql/parser/AstStatementBuilder.java b/sql/src/main/java/org/opensearch/sql/sql/parser/AstStatementBuilder.java index 40d549764a..593e7b51ff 100644 --- a/sql/src/main/java/org/opensearch/sql/sql/parser/AstStatementBuilder.java +++ b/sql/src/main/java/org/opensearch/sql/sql/parser/AstStatementBuilder.java @@ -26,7 +26,7 @@ public class AstStatementBuilder extends OpenSearchSQLParserBaseVisitor { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList())); - return null; - }).when(queryService).execute(any(), any()); - + public void can_execute_sql_query() { sqlService.execute( new SQLQueryRequest(new JSONObject(), "SELECT 123", QUERY, "jdbc"), - new ResponseListener() { + new ResponseListener<>() { @Override public void onResponse(QueryResponse response) { assertNotNull(response); @@ -84,13 +82,24 @@ public void onFailure(Exception e) { } @Test - public void canExecuteCsvFormatRequest() { - doAnswer(invocation -> { - ResponseListener listener = invocation.getArgument(1); - listener.onResponse(new QueryResponse(schema, Collections.emptyList())); - return null; - }).when(queryService).execute(any(), any()); + public void can_execute_cursor_query() { + sqlService.execute( + new SQLQueryRequest(new JSONObject(), null, QUERY, Map.of("format", "jdbc"), "n:cursor"), + new ResponseListener<>() { + @Override + public void onResponse(QueryResponse response) { + assertNotNull(response); + } + + 
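// Format handling after this change, in two examples: getFormat() falls back to "jdbc" when no
// "format" parameter is present, and isSupportedFormat() accepts exactly csv/jdbc/raw,
// case-insensitively. Values are illustrative.
SQLQueryRequest defaulted = new SQLQueryRequest(
    new JSONObject("{\"query\": \"SELECT 1\"}"), "SELECT 1", "_plugins/_sql", Map.of(), null);
// defaulted.format() == Format.JDBC and defaulted.isSupported() is true
SQLQueryRequest unknownFormat = new SQLQueryRequest(
    new JSONObject("{\"query\": \"SELECT 1\"}"), "SELECT 1", "_plugins/_sql",
    Map.of("format", "other"), null);
// unknownFormat.isSupported() is false and unknownFormat.format() throws IllegalArgumentException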
@Override + public void onFailure(Exception e) { + fail(e); + } + }); + } + @Test + public void can_execute_csv_format_request() { sqlService.execute( new SQLQueryRequest(new JSONObject(), "SELECT 123", QUERY, "csv"), new ResponseListener() { @@ -107,7 +116,7 @@ public void onFailure(Exception e) { } @Test - public void canExplainSqlQuery() { + public void can_explain_sql_query() { doAnswer(invocation -> { ResponseListener listener = invocation.getArgument(1); listener.onResponse(new ExplainResponse(new ExplainResponseNode("Test"))); @@ -129,7 +138,25 @@ public void onFailure(Exception e) { } @Test - public void canCaptureErrorDuringExecution() { + public void cannot_explain_cursor_query() { + sqlService.explain(new SQLQueryRequest(new JSONObject(), null, EXPLAIN, + Map.of("format", "jdbc"), "n:cursor"), + new ResponseListener() { + @Override + public void onResponse(ExplainResponse response) { + fail(response.toString()); + } + + @Override + public void onFailure(Exception e) { + assertTrue(e.getMessage() + .contains("`explain` request for cursor requests is not supported.")); + } + }); + } + + @Test + public void can_capture_error_during_execution() { sqlService.execute( new SQLQueryRequest(new JSONObject(), "SELECT", QUERY, ""), new ResponseListener() { @@ -146,7 +173,7 @@ public void onFailure(Exception e) { } @Test - public void canCaptureErrorDuringExplain() { + public void can_capture_error_during_explain() { sqlService.explain( new SQLQueryRequest(new JSONObject(), "SELECT", EXPLAIN, ""), new ResponseListener() { @@ -161,5 +188,4 @@ public void onFailure(Exception e) { } }); } - } diff --git a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java index 52a1f534e9..62bb665537 100644 --- a/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java +++ b/sql/src/test/java/org/opensearch/sql/sql/domain/SQLQueryRequestTest.java @@ -6,36 +6,43 @@ package org.opensearch.sql.sql.domain; +import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableMap; +import java.util.HashMap; import java.util.Map; import org.json.JSONObject; +import org.junit.jupiter.api.DisplayNameGeneration; +import org.junit.jupiter.api.DisplayNameGenerator; import org.junit.jupiter.api.Test; import org.opensearch.sql.protocol.response.format.Format; +@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class) public class SQLQueryRequestTest { @Test - public void shouldSupportQuery() { + public void should_support_query() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1").build(); assertTrue(request.isSupported()); } @Test - public void shouldSupportQueryWithJDBCFormat() { + public void should_support_query_with_JDBC_format() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") .format("jdbc") .build(); - assertTrue(request.isSupported()); - assertEquals(request.format(), Format.JDBC); + assertAll( + () -> assertTrue(request.isSupported()), + () -> assertEquals(request.format(), Format.JDBC) + ); } @Test - public void shouldSupportQueryWithQueryFieldOnly() { + public void should_support_query_with_query_field_only() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") 
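// The cannot_explain_cursor_query test above pins the error text, but the guard itself is not in
// this hunk; the following is only a hypothetical sketch of such a check, where `request` is the
// SQLQueryRequest and `listener` the explain ResponseListener (exception type and placement are
// assumptions, the message matches the assertion):
if (request.isExplainRequest() && request.getCursor().isPresent()) {
  listener.onFailure(new UnsupportedOperationException(
      "`explain` request for cursor requests is not supported."));
  return;
}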
.jsonContent("{\"query\": \"SELECT 1\"}") @@ -44,16 +51,32 @@ public void shouldSupportQueryWithQueryFieldOnly() { } @Test - public void shouldSupportQueryWithParameters() { - SQLQueryRequest request = + public void should_support_query_with_parameters() { + SQLQueryRequest requestWithContent = SQLQueryRequestBuilder.request("SELECT 1") .jsonContent("{\"query\": \"SELECT 1\", \"parameters\":[]}") .build(); - assertTrue(request.isSupported()); + SQLQueryRequest requestWithParams = + SQLQueryRequestBuilder.request("SELECT 1") + .params(Map.of("one", "two")) + .build(); + assertAll( + () -> assertTrue(requestWithContent.isSupported()), + () -> assertTrue(requestWithParams.isSupported()) + ); + } + + @Test + public void should_support_query_without_parameters() { + SQLQueryRequest requestWithNoParams = + SQLQueryRequestBuilder.request("SELECT 1") + .params(Map.of()) + .build(); + assertTrue(requestWithNoParams.isSupported()); } @Test - public void shouldSupportQueryWithZeroFetchSize() { + public void should_support_query_with_zero_fetch_size() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0}") @@ -62,7 +85,7 @@ public void shouldSupportQueryWithZeroFetchSize() { } @Test - public void shouldSupportQueryWithParametersAndZeroFetchSize() { + public void should_support_query_with_parameters_and_zero_fetch_size() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1") .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 0, \"parameters\":[]}") @@ -71,70 +94,155 @@ public void shouldSupportQueryWithParametersAndZeroFetchSize() { } @Test - public void shouldSupportExplain() { + public void should_support_explain() { SQLQueryRequest explainRequest = SQLQueryRequestBuilder.request("SELECT 1") .path("_plugins/_sql/_explain") .build(); - assertTrue(explainRequest.isExplainRequest()); - assertTrue(explainRequest.isSupported()); + + assertAll( + () -> assertTrue(explainRequest.isExplainRequest()), + () -> assertTrue(explainRequest.isSupported()) + ); } @Test - public void shouldNotSupportCursorRequest() { + public void should_support_cursor_request() { SQLQueryRequest fetchSizeRequest = SQLQueryRequestBuilder.request("SELECT 1") .jsonContent("{\"query\": \"SELECT 1\", \"fetch_size\": 5}") .build(); - assertFalse(fetchSizeRequest.isSupported()); SQLQueryRequest cursorRequest = + SQLQueryRequestBuilder.request(null) + .cursor("abcdefgh...") + .build(); + + assertAll( + () -> assertTrue(fetchSizeRequest.isSupported()), + () -> assertTrue(cursorRequest.isSupported()) + ); + } + + @Test + public void should_not_support_request_with_empty_cursor() { + SQLQueryRequest requestWithEmptyCursor = + SQLQueryRequestBuilder.request(null) + .cursor("") + .build(); + SQLQueryRequest requestWithNullCursor = + SQLQueryRequestBuilder.request(null) + .cursor(null) + .build(); + assertAll( + () -> assertFalse(requestWithEmptyCursor.isSupported()), + () -> assertFalse(requestWithNullCursor.isSupported()) + ); + } + + @Test + public void should_not_support_request_with_unknown_field() { + SQLQueryRequest request = + SQLQueryRequestBuilder.request("SELECT 1") + .jsonContent("{\"pewpew\": 42}") + .build(); + assertFalse(request.isSupported()); + } + + @Test + public void should_not_support_request_with_cursor_and_something_else() { + SQLQueryRequest requestWithQuery = SQLQueryRequestBuilder.request("SELECT 1") - .jsonContent("{\"cursor\": \"abcdefgh...\"}") + .cursor("n:12356") + .build(); + SQLQueryRequest requestWithParams = + 
SQLQueryRequestBuilder.request(null) + .cursor("n:12356") + .params(Map.of("one", "two")) + .build(); + SQLQueryRequest requestWithParamsWithFormat = + SQLQueryRequestBuilder.request(null) + .cursor("n:12356") + .params(Map.of("format", "jdbc")) .build(); - assertFalse(cursorRequest.isSupported()); + SQLQueryRequest requestWithParamsWithFormatAnd = + SQLQueryRequestBuilder.request(null) + .cursor("n:12356") + .params(Map.of("format", "jdbc", "something", "else")) + .build(); + SQLQueryRequest requestWithFetchSize = + SQLQueryRequestBuilder.request(null) + .cursor("n:12356") + .jsonContent("{\"fetch_size\": 5}") + .build(); + SQLQueryRequest requestWithNoParams = + SQLQueryRequestBuilder.request(null) + .cursor("n:12356") + .params(Map.of()) + .build(); + SQLQueryRequest requestWithNoContent = + SQLQueryRequestBuilder.request(null) + .cursor("n:12356") + .jsonContent("{}") + .build(); + assertAll( + () -> assertFalse(requestWithQuery.isSupported()), + () -> assertFalse(requestWithParams.isSupported()), + () -> assertFalse(requestWithFetchSize.isSupported()), + () -> assertTrue(requestWithNoParams.isSupported()), + () -> assertTrue(requestWithParamsWithFormat.isSupported()), + () -> assertFalse(requestWithParamsWithFormatAnd.isSupported()), + () -> assertTrue(requestWithNoContent.isSupported()) + ); } @Test - public void shouldUseJDBCFormatByDefault() { + public void should_use_JDBC_format_by_default() { SQLQueryRequest request = SQLQueryRequestBuilder.request("SELECT 1").params(ImmutableMap.of()).build(); assertEquals(request.format(), Format.JDBC); } @Test - public void shouldSupportCSVFormatAndSanitize() { + public void should_support_CSV_format_and_sanitize() { SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1") .format("csv") .build(); - assertTrue(csvRequest.isSupported()); - assertEquals(csvRequest.format(), Format.CSV); - assertTrue(csvRequest.sanitize()); + assertAll( + () -> assertTrue(csvRequest.isSupported()), + () -> assertEquals(csvRequest.format(), Format.CSV), + () -> assertTrue(csvRequest.sanitize()) + ); } @Test - public void shouldSkipSanitizeIfSetFalse() { + public void should_skip_sanitize_if_set_false() { ImmutableMap.Builder builder = ImmutableMap.builder(); Map params = builder.put("format", "csv").put("sanitize", "false").build(); SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1").params(params).build(); - assertEquals(csvRequest.format(), Format.CSV); - assertFalse(csvRequest.sanitize()); + assertAll( + () -> assertEquals(csvRequest.format(), Format.CSV), + () -> assertFalse(csvRequest.sanitize()) + ); } @Test - public void shouldNotSupportOtherFormat() { + public void should_not_support_other_format() { SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1") .format("other") .build(); - assertFalse(csvRequest.isSupported()); - assertThrows(IllegalArgumentException.class, csvRequest::format, - "response in other format is not supported."); + + assertAll( + () -> assertFalse(csvRequest.isSupported()), + () -> assertEquals("response in other format is not supported.", + assertThrows(IllegalArgumentException.class, csvRequest::format).getMessage()) + ); } @Test - public void shouldSupportRawFormat() { + public void should_support_raw_format() { SQLQueryRequest csvRequest = SQLQueryRequestBuilder.request("SELECT 1") .format("raw") @@ -150,7 +258,8 @@ private static class SQLQueryRequestBuilder { private String query; private String path = "_plugins/_sql"; private String format; - private Map params; + private 
String cursor; + private Map params = new HashMap<>(); static SQLQueryRequestBuilder request(String query) { SQLQueryRequestBuilder builder = new SQLQueryRequestBuilder(); @@ -178,14 +287,17 @@ SQLQueryRequestBuilder params(Map params) { return this; } + SQLQueryRequestBuilder cursor(String cursor) { + this.cursor = cursor; + return this; + } + SQLQueryRequest build() { - if (jsonContent == null) { - jsonContent = "{\"query\": \"" + query + "\"}"; - } - if (params != null) { - return new SQLQueryRequest(new JSONObject(jsonContent), query, path, params); + if (format != null) { + params.put("format", format); } - return new SQLQueryRequest(new JSONObject(jsonContent), query, path, format); + return new SQLQueryRequest(jsonContent == null ? null : new JSONObject(jsonContent), + query, path, params, cursor); } }
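// End-to-end sketch of the v2 cursor flow these changes add, tying the pieces together: the
// first request carries a fetch_size, the JDBC response carries a cursor, and the follow-up
// request carries only that cursor. `sqlService` and the queried index/column are placeholders;
// the constructors, getters and the Cursor.None sentinel are the ones introduced in this diff,
// and the listener type mirrors SQLServiceTest above.
SQLQueryRequest firstPage = new SQLQueryRequest(
    new JSONObject("{\"query\": \"SELECT name FROM accounts\", \"fetch_size\": 5}"),
    "SELECT name FROM accounts", "_plugins/_sql", Map.of("format", "jdbc"), null);

sqlService.execute(firstPage, new ResponseListener<ExecutionEngine.QueryResponse>() {
  @Override
  public void onResponse(ExecutionEngine.QueryResponse response) {
    // JdbcResponseFormatter serializes response.getCursor() into the "cursor" field; a client
    // posts that value back, by itself, to fetch the next page.
    if (!response.getCursor().equals(Cursor.None)) {
      SQLQueryRequest nextPage = new SQLQueryRequest(
          null, null, "_plugins/_sql", Map.of("format", "jdbc"),
          response.getCursor().toString());
      sqlService.execute(nextPage, this);
    }
  }

  @Override
  public void onFailure(Exception e) {
    // surface the error to the caller
  }
});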