From 02999073274570b9ed6b1629b0d545eb3e88e82e Mon Sep 17 00:00:00 2001
From: Michael Froh
Date: Wed, 8 Mar 2023 19:38:35 +0000
Subject: [PATCH 01/14] Initial search pipelines implementation

This commit includes the basic features of search pipelines (see
https://github.com/opensearch-project/search-processor/issues/80).
Search pipelines are modeled after ingest pipelines and provide a
simple, clean API for components to modify search requests and
responses.

With this commit we can:

1. Create, retrieve, update, and delete search pipelines.
2. Transform search requests and responses by explicitly referencing a
   pipeline.

Later work will include:

1. Adding an index setting to specify a default search pipeline.
2. Allowing search pipelines to be defined within a search request (for
   development/testing purposes, akin to simulating an ingest pipeline).
3. Adding a collection of search pipeline processors to support common
   useful transformations. (Suggestions welcome!)

Signed-off-by: Michael Froh
---
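In REST terms, the API added by this patch can be exercised roughly as follows. The endpoint paths and the filter_query body mirror the rest-api-spec definitions and YAML tests added below; my_pipeline and my-index are placeholders, and referencing a pipeline at search time via a search_pipeline request parameter is an assumption based on the one-line RestSearchAction change listed in the diffstat.

 PUT /_search/pipeline/my_pipeline
 {
   "description": "Attach a term filter to every incoming query",
   "request_processors": [
     {
       "filter_query": {
         "query": { "term": { "field": "value" } }
       }
     }
   ]
 }

 GET /_search/pipeline/my_pipeline

 # Parameter name assumed; see the RestSearchAction change in the diffstat.
 GET /my-index/_search?search_pipeline=my_pipeline
 { "query": { "match": { "text": "foo" } } }

 DELETE /_search/pipeline/my_pipeline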
 CHANGELOG.md | 1 +
 .../client/RestHighLevelClient.java | 5 +
 .../client/SearchPipelinesClient.java | 153 +++++
 .../SearchPipelinesRequestConverters.java | 61 ++
 .../client/SearchPipelinesClientIT.java | 116 ++++
 modules/search-pipeline-common/build.gradle | 68 ++
 .../common/SearchPipelineCommonIT.java | 13 +
 .../pipeline/common/AbstractProcessor.java | 34 +
 .../common/FilterQueryRequestProcessor.java | 117 ++++
 .../SearchPipelineCommonModulePlugin.java | 31 +
 .../search/pipeline/common/package-info.java | 12 +
 .../FilterQueryRequestProcessorTests.java | 51 ++
 .../SearchPipelineCommonYamlTestSuiteIT.java | 25 +
 .../test/search_pipeline/10_basic.yml | 15 +
 .../test/search_pipeline/20_crud.yml | 47 ++
 .../api/search_pipelines.delete_pipeline.json | 35 +
 .../api/search_pipelines.get_pipeline.json | 37 ++
 .../api/search_pipelines.put_pipeline.json | 39 ++
 .../test/search_pipelines/10_basic.yml | 166 +++++
 .../org/opensearch/action/ActionModule.java | 19 +
 .../admin/cluster/node/info/NodeInfo.java | 11 +-
 .../cluster/node/info/NodesInfoRequest.java | 3 +-
 .../cluster/node/info/NodesInfoResponse.java | 4 +
 .../node/info/TransportNodesInfoAction.java | 3 +-
 .../stats/TransportClusterStatsAction.java | 2 +-
 .../action/ingest/GetPipelineResponse.java | 5 +
 .../search/DeleteSearchPipelineAction.java | 26 +
 .../search/DeleteSearchPipelineRequest.java | 56 ++
 .../DeleteSearchPipelineTransportAction.java | 80 +++
 .../search/GetSearchPipelineAction.java | 25 +
 .../search/GetSearchPipelineRequest.java | 55 ++
 .../search/GetSearchPipelineResponse.java | 144 ++++
 .../GetSearchPipelineTransportAction.java | 78 +++
 .../search/PutSearchPipelineAction.java | 26 +
 .../search/PutSearchPipelineRequest.java | 87 +++
 .../PutSearchPipelineTransportAction.java | 99 +++
 .../action/search/SearchRequest.java | 18 +-
 .../action/search/TransportSearchAction.java | 25 +-
 .../opensearch/client/ClusterAdminClient.java | 34 +
 .../client/support/AbstractClient.java | 37 ++
 .../org/opensearch/cluster/ClusterModule.java | 9 +
 .../service/ClusterManagerTaskKeys.java | 3 +
 .../main/java/org/opensearch/node/Node.java | 17 +-
 .../java/org/opensearch/node/NodeService.java | 13 +-
 .../plugins/SearchPipelinesPlugin.java | 32 +
 .../RestDeleteSearchPipelineAction.java | 45 ++
 .../search/RestGetSearchPipelineAction.java | 45 ++
 .../search/RestPutSearchPipelineAction.java | 49 ++
 .../rest/action/search/RestSearchAction.java | 1 +
 .../opensearch/search/pipeline/Pipeline.java | 158 +++++
 .../pipeline/PipelineConfiguration.java | 160 +++++
 .../opensearch/search/pipeline/Processor.java | 166 +++++
 .../search/pipeline/ProcessorInfo.java | 82 +++
 .../pipeline/SearchPipelineMetadata.java | 163 +++++
 .../pipeline/SearchPipelineService.java | 410 ++++++++++++
 .../search/pipeline/SearchPipelinesInfo.java | 82 +++
 .../pipeline/SearchRequestProcessor.java | 19 +
 .../pipeline/SearchResponseProcessor.java | 20 +
 .../search/pipeline/package-info.java | 10 +
 .../cluster/node/info/NodeInfoTests.java | 1 +
 .../cluster/stats/ClusterStatsNodesTests.java | 1 +
 .../ingest/GetPipelineResponseTests.java | 11 +
 .../nodesinfo/NodeInfoStreamingTests.java | 15 +-
 .../pipeline/SearchPipelineServiceTests.java | 620 ++++++++++++++++++
 .../snapshots/SnapshotResiliencyTests.java | 13 +-
 65 files changed, 3993 insertions(+), 15 deletions(-)
 create mode 100644 client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java
 create mode 100644 client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java
 create mode 100644 client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java
 create mode 100644 modules/search-pipeline-common/build.gradle
 create mode 100644 modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java
 create mode 100644 modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/AbstractProcessor.java
 create mode 100644 modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessor.java
 create mode 100644 modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java
 create mode 100644 modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/package-info.java
 create mode 100644 modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessorTests.java
 create mode 100644 modules/search-pipeline-common/src/yamlRestTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonYamlTestSuiteIT.java
 create mode 100644 modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/10_basic.yml
 create mode 100644 modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml
 create mode 100644 server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineAction.java
 create mode 100644 server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineRequest.java
 create mode 100644 server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineTransportAction.java
 create mode 100644 server/src/main/java/org/opensearch/action/search/GetSearchPipelineAction.java
 create mode 100644 server/src/main/java/org/opensearch/action/search/GetSearchPipelineRequest.java
 create mode 100644 server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java
 create mode 100644 server/src/main/java/org/opensearch/action/search/GetSearchPipelineTransportAction.java
 create mode 100644
server/src/main/java/org/opensearch/action/search/PutSearchPipelineAction.java create mode 100644 server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java create mode 100644 server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java create mode 100644 server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java create mode 100644 server/src/main/java/org/opensearch/rest/action/search/RestDeleteSearchPipelineAction.java create mode 100644 server/src/main/java/org/opensearch/rest/action/search/RestGetSearchPipelineAction.java create mode 100644 server/src/main/java/org/opensearch/rest/action/search/RestPutSearchPipelineAction.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/Pipeline.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/Processor.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/ProcessorInfo.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/SearchPipelineMetadata.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java create mode 100644 server/src/main/java/org/opensearch/search/pipeline/package-info.java create mode 100644 server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index df3ac6ae0fa1b..bfd313e9a8f37 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -78,6 +78,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Disallow multiple data paths for search nodes ([#6427](https://github.com/opensearch-project/OpenSearch/pull/6427)) - [Segment Replication] Allocation and rebalancing based on average primary shard count per index ([#6422](https://github.com/opensearch-project/OpenSearch/pull/6422)) - Add 'base_path' setting to File System Repository ([#6558](https://github.com/opensearch-project/OpenSearch/pull/6558)) +- Add initial search pipelines ([#dummy](https://github.com/opensearch-project/OpenSearch/pull/dummy)) ### Dependencies - Bump `org.apache.logging.log4j:log4j-core` from 2.18.0 to 2.20.0 ([#6490](https://github.com/opensearch-project/OpenSearch/pull/6490)) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index e97a42d971a85..d0d823575b563 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -268,6 +268,7 @@ public class RestHighLevelClient implements Closeable { private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); private final TasksClient tasksClient = new TasksClient(this); + private final SearchPipelinesClient searchPipelinesClient = new SearchPipelinesClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -354,6 +355,10 @@ public final 
TasksClient tasks() { return tasksClient; } + public final SearchPipelinesClient searchPipelines() { + return searchPipelinesClient; + } + /** * Executes a bulk request using the Bulk API. * @param bulkRequest the request diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java new file mode 100644 index 0000000000000..36a390a805f4b --- /dev/null +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java @@ -0,0 +1,153 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.client; + +import org.opensearch.action.ActionListener; +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineResponse; +import org.opensearch.action.search.PutSearchPipelineRequest; +import org.opensearch.action.support.master.AcknowledgedResponse; + +import java.io.IOException; +import java.util.Collections; + +import static java.util.Collections.emptySet; + +public final class SearchPipelinesClient { + private final RestHighLevelClient restHighLevelClient; + + SearchPipelinesClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Add a pipeline or update an existing pipeline. + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public AcknowledgedResponse putPipeline(PutSearchPipelineRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity( + request, + SearchPipelinesRequestConverters::putPipeline, + options, + AcknowledgedResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously add a pipeline or update an existing pipeline. + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return cancellable that may be used to cancel the request + */ + public Cancellable putPipelineAsync( + PutSearchPipelineRequest request, + RequestOptions options, + ActionListener listener + ) { + return restHighLevelClient.performRequestAsyncAndParseEntity( + request, + SearchPipelinesRequestConverters::putPipeline, + options, + AcknowledgedResponse::fromXContent, + listener, + emptySet() + ); + } + + /** + * Get an existing pipeline. + * + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetSearchPipelineResponse getPipeline(GetSearchPipelineRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity( + request, + SearchPipelinesRequestConverters::getPipeline, + options, + GetSearchPipelineResponse::fromXContent, + Collections.singleton(404) + ); + } + + /** + * Asynchronously get an existing pipeline. + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return cancellable that may be used to cancel the request + */ + public Cancellable getPipelineAsync( + GetSearchPipelineRequest request, + RequestOptions options, + ActionListener listener + ) { + return restHighLevelClient.performRequestAsyncAndParseEntity( + request, + SearchPipelinesRequestConverters::getPipeline, + options, + GetSearchPipelineResponse::fromXContent, + listener, + Collections.singleton(404) + ); + } + + /** + * Delete an existing pipeline. + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public AcknowledgedResponse deletePipeline(DeleteSearchPipelineRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity( + request, + SearchPipelinesRequestConverters::deletePipeline, + options, + AcknowledgedResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously delete an existing pipeline. + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return cancellable that may be used to cancel the request + */ + public Cancellable deletePipelineAsync( + DeleteSearchPipelineRequest request, + RequestOptions options, + ActionListener listener + ) { + return restHighLevelClient.performRequestAsyncAndParseEntity( + request, + SearchPipelinesRequestConverters::deletePipeline, + options, + AcknowledgedResponse::fromXContent, + listener, + emptySet() + ); + } + +} diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java new file mode 100644 index 0000000000000..53df19d2115d5 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java @@ -0,0 +1,61 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.client; + +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPut; +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineRequest; +import org.opensearch.action.search.PutSearchPipelineRequest; + +import java.io.IOException; + +final class SearchPipelinesRequestConverters { + private SearchPipelinesRequestConverters() {} + + static Request putPipeline(PutSearchPipelineRequest putPipelineRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_search/pipeline") + .addPathPart(putPipelineRequest.getId()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(); + params.withTimeout(putPipelineRequest.timeout()); + params.withClusterManagerTimeout(putPipelineRequest.clusterManagerNodeTimeout()); + request.addParameters(params.asMap()); + request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request deletePipeline(DeleteSearchPipelineRequest deletePipelineRequest) { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_search/pipeline") + .addPathPart(deletePipelineRequest.getId()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(); + parameters.withTimeout(deletePipelineRequest.timeout()); + parameters.withClusterManagerTimeout(deletePipelineRequest.clusterManagerNodeTimeout()); + request.addParameters(parameters.asMap()); + return request; + } + + static Request getPipeline(GetSearchPipelineRequest getPipelineRequest) { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_search/pipeline") + .addCommaSeparatedPathParts(getPipelineRequest.getIds()) + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(); + parameters.withClusterManagerTimeout(getPipelineRequest.clusterManagerNodeTimeout()); + request.addParameters(parameters.asMap()); + return request; + } +} diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java new file mode 100644 index 0000000000000..6f01cb5144eba --- /dev/null +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.client; + +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineResponse; +import org.opensearch.action.search.PutSearchPipelineRequest; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.search.pipeline.Pipeline; + +import java.io.IOException; + +public class SearchPipelinesClientIT extends OpenSearchRestHighLevelClientTestCase { + + public void testPutPipeline() throws IOException { + String id = "some_pipeline_id"; + XContentBuilder pipelineBuilder = buildSearchPipeline(); + PutSearchPipelineRequest request = new PutSearchPipelineRequest( + id, + BytesReference.bytes(pipelineBuilder), + pipelineBuilder.contentType() + ); + createPipeline(request); + } + + private static void createPipeline(PutSearchPipelineRequest request) throws IOException { + AcknowledgedResponse response = execute( + request, + highLevelClient().searchPipelines()::putPipeline, + highLevelClient().searchPipelines()::putPipelineAsync + ); + assertTrue(response.isAcknowledged()); + } + + public void testGetPipeline() throws IOException { + String id = "some_pipeline_id"; + XContentBuilder pipelineBuilder = buildSearchPipeline(); + PutSearchPipelineRequest request = new PutSearchPipelineRequest( + id, + BytesReference.bytes(pipelineBuilder), + pipelineBuilder.contentType() + ); + createPipeline(request); + + GetSearchPipelineRequest getRequest = new GetSearchPipelineRequest(id); + GetSearchPipelineResponse response = execute( + getRequest, + highLevelClient().searchPipelines()::getPipeline, + highLevelClient().searchPipelines()::getPipelineAsync + ); + assertTrue(response.isFound()); + assertEquals(1, response.pipelines().size()); + assertEquals(id, response.pipelines().get(0).getId()); + } + + public void testDeletePipeline() throws IOException { + String id = "some_pipeline_id"; + XContentBuilder pipelineBuilder = buildSearchPipeline(); + PutSearchPipelineRequest request = new PutSearchPipelineRequest( + id, + BytesReference.bytes(pipelineBuilder), + pipelineBuilder.contentType() + ); + createPipeline(request); + + DeleteSearchPipelineRequest deleteRequest = new DeleteSearchPipelineRequest(id); + AcknowledgedResponse response = execute( + deleteRequest, + highLevelClient().searchPipelines()::deletePipeline, + highLevelClient().searchPipelines()::deletePipelineAsync + ); + assertTrue(response.isAcknowledged()); + } + + private static XContentBuilder buildSearchPipeline() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + return buildSearchPipeline(pipelineBuilder); + } + + private static XContentBuilder buildSearchPipeline(XContentBuilder builder) throws IOException { + builder.startObject(); + { + builder.field("description", "a pipeline description"); + builder.startArray(Pipeline.REQUEST_PROCESSORS_KEY); + { + builder.startObject().startObject("filter_query"); + { + builder.startObject("query"); + { + builder.startObject("term"); + { + builder.field("field", "value"); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject().endObject(); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } +} diff 
--git a/modules/search-pipeline-common/build.gradle b/modules/search-pipeline-common/build.gradle new file mode 100644 index 0000000000000..dd37384923930 --- /dev/null +++ b/modules/search-pipeline-common/build.gradle @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +apply plugin: 'opensearch.yaml-rest-test' +apply plugin: 'opensearch.internal-cluster-test' + +opensearchplugin { + description 'Module for search pipeline processors that do not require additional security permissions or have large dependencies and resources' + classname 'org.opensearch.search.pipeline.common.SearchPipelineCommonModulePlugin' + extendedPlugins = ['lang-painless'] +} + +dependencies { + compileOnly project(':modules:lang-painless') + api project(':libs:opensearch-grok') + api project(':libs:opensearch-dissect') +} + +restResources { + restApi { + includeCore '_common', 'search_pipelines', 'cluster', 'indices', 'index', 'nodes', 'get', 'update', 'cat', 'mget' + } +} + +testClusters.all { + // Needed in order to test ingest pipeline templating: + // (this is because the integTest node is not using default distribution, but only the minimal number of required modules) + module ':modules:lang-mustache' +} + +thirdPartyAudit.ignoreMissingClasses( + // from log4j + 'org.osgi.framework.AdaptPermission', + 'org.osgi.framework.AdminPermission', + 'org.osgi.framework.Bundle', + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + 'org.osgi.framework.BundleEvent', + 'org.osgi.framework.SynchronousBundleListener', + 'org.osgi.framework.wiring.BundleWire', + 'org.osgi.framework.wiring.BundleWiring' +) diff --git a/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java b/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java new file mode 100644 index 0000000000000..e855d0c2ae880 --- /dev/null +++ b/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java @@ -0,0 +1,13 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.pipeline.common; + +import org.opensearch.test.OpenSearchIntegTestCase; + +public class SearchPipelineCommonIT extends OpenSearchIntegTestCase {} diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/AbstractProcessor.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/AbstractProcessor.java new file mode 100644 index 0000000000000..e62497cb54db5 --- /dev/null +++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/AbstractProcessor.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline.common; + +import org.opensearch.search.pipeline.Processor; + +/** + * Base class for common processor behavior. + */ +abstract class AbstractProcessor implements Processor { + private final String tag; + private final String description; + + protected AbstractProcessor(String tag, String description) { + this.tag = tag; + this.description = description; + } + + @Override + public String getTag() { + return tag; + } + + @Override + public String getDescription() { + return description; + } +} diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessor.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessor.java new file mode 100644 index 0000000000000..81c00012daec6 --- /dev/null +++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessor.java @@ -0,0 +1,117 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline.common; + +import org.opensearch.action.search.SearchRequest; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.search.pipeline.Processor; +import org.opensearch.search.pipeline.SearchRequestProcessor; + +import java.io.InputStream; +import java.util.Map; + +import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; + +/** + * This is a {@link SearchRequestProcessor} that replaces the incoming query with a BooleanQuery + * that MUST match the incoming query with a FILTER clause based on the configured query. + */ +public class FilterQueryRequestProcessor extends AbstractProcessor implements SearchRequestProcessor { + /** + * Key to reference this processor type from a search pipeline. 
+ */ + public static final String TYPE = "filter_query"; + + final QueryBuilder filterQuery; + + @Override + public String getType() { + return TYPE; + } + + /** + * Constructor that takes a filter query. + * + * @param tag processor tag + * @param description processor description + * @param filterQuery the query that will be added as a filter to incoming queries + */ + public FilterQueryRequestProcessor(String tag, String description, QueryBuilder filterQuery) { + super(tag, description); + this.filterQuery = filterQuery; + } + + @Override + public SearchRequest processRequest(SearchRequest request) throws Exception { + QueryBuilder originalQuery = null; + if (request.source() != null) { + originalQuery = request.source().query(); + } + + BoolQueryBuilder filteredQuery = new BoolQueryBuilder().filter(filterQuery); + if (originalQuery != null) { + filteredQuery.must(originalQuery); + } + if (request.source() == null) { + request.source(new SearchSourceBuilder()); + } + request.source().query(filteredQuery); + return request; + } + + static class Factory implements Processor.Factory { + private final NamedXContentRegistry namedXContentRegistry; + public static final ParseField QUERY_FIELD = new ParseField("query"); + + Factory(NamedXContentRegistry namedXContentRegistry) { + this.namedXContentRegistry = namedXContentRegistry; + } + + @Override + public FilterQueryRequestProcessor create( + Map processorFactories, + String tag, + String description, + Map config + ) throws Exception { + try ( + XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config); + InputStream stream = BytesReference.bytes(builder).streamInput(); + XContentParser parser = XContentType.JSON.xContent() + .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, stream) + ) { + XContentParser.Token token = parser.nextToken(); + assert token == XContentParser.Token.START_OBJECT; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + return new FilterQueryRequestProcessor(tag, description, parseInnerQueryBuilder(parser)); + } + } + } + } + throw new IllegalArgumentException( + "Did not specify the " + QUERY_FIELD.getPreferredName() + " property in processor of type " + TYPE + ); + } + } +} diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java new file mode 100644 index 0000000000000..fc8f68cf3db70 --- /dev/null +++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java @@ -0,0 +1,31 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline.common; + +import org.opensearch.plugins.Plugin; +import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.search.pipeline.Processor; + +import java.util.Map; + +/** + * Plugin providing common search request/response processors for use in search pipelines. 
+ */ +public class SearchPipelineCommonModulePlugin extends Plugin implements SearchPipelinesPlugin { + + /** + * No constructor needed, but build complains if we don't have a constructor with JavaDoc. + */ + public SearchPipelineCommonModulePlugin() {} + + @Override + public Map getProcessors(Processor.Parameters parameters) { + return Map.of(FilterQueryRequestProcessor.TYPE, new FilterQueryRequestProcessor.Factory(parameters.namedXContentRegistry)); + } +} diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/package-info.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/package-info.java new file mode 100644 index 0000000000000..9b065b4008021 --- /dev/null +++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * A collection of commonly-useful request and response processors for use in search pipelines. + */ +package org.opensearch.search.pipeline.common; diff --git a/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessorTests.java b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessorTests.java new file mode 100644 index 0000000000000..1f355ac97c801 --- /dev/null +++ b/modules/search-pipeline-common/src/test/java/org/opensearch/search/pipeline/common/FilterQueryRequestProcessorTests.java @@ -0,0 +1,51 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.pipeline.common; + +import org.opensearch.action.search.SearchRequest; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.test.AbstractBuilderTestCase; + +import java.util.Collections; +import java.util.Map; + +public class FilterQueryRequestProcessorTests extends AbstractBuilderTestCase { + + public void testFilterQuery() throws Exception { + QueryBuilder filterQuery = new TermQueryBuilder("field", "value"); + FilterQueryRequestProcessor filterQueryRequestProcessor = new FilterQueryRequestProcessor(null, null, filterQuery); + QueryBuilder incomingQuery = new TermQueryBuilder("text", "foo"); + SearchSourceBuilder source = new SearchSourceBuilder().query(incomingQuery); + SearchRequest request = new SearchRequest().source(source); + SearchRequest transformedRequest = filterQueryRequestProcessor.processRequest(request); + assertEquals(new BoolQueryBuilder().must(incomingQuery).filter(filterQuery), transformedRequest.source().query()); + + // Test missing incoming query + request = new SearchRequest(); + transformedRequest = filterQueryRequestProcessor.processRequest(request); + assertEquals(new BoolQueryBuilder().filter(filterQuery), transformedRequest.source().query()); + } + + public void testFactory() throws Exception { + FilterQueryRequestProcessor.Factory factory = new FilterQueryRequestProcessor.Factory(this.xContentRegistry()); + FilterQueryRequestProcessor processor = factory.create( + Collections.emptyMap(), + null, + null, + Map.of("query", Map.of("term", Map.of("field", "value"))) + ); + assertEquals(new TermQueryBuilder("field", "value"), processor.filterQuery); + + // Missing "query" parameter: + expectThrows(IllegalArgumentException.class, () -> factory.create(Collections.emptyMap(), null, null, Collections.emptyMap())); + } +} diff --git a/modules/search-pipeline-common/src/yamlRestTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonYamlTestSuiteIT.java b/modules/search-pipeline-common/src/yamlRestTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonYamlTestSuiteIT.java new file mode 100644 index 0000000000000..cd1cbbc995f8b --- /dev/null +++ b/modules/search-pipeline-common/src/yamlRestTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonYamlTestSuiteIT.java @@ -0,0 +1,25 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
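To make the assertions above concrete, this is a sketch of how FilterQueryRequestProcessor rewrites a search body when it is configured with the same term filter used in these tests (field names and values are illustrative):

 Incoming request body:
 {
   "query": { "term": { "text": "foo" } }
 }

 After processRequest:
 {
   "query": {
     "bool": {
       "must": [ { "term": { "text": "foo" } } ],
       "filter": [ { "term": { "field": "value" } } ]
     }
   }
 }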
+ */ + +package org.opensearch.search.pipeline.common; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.opensearch.test.rest.yaml.ClientYamlTestCandidate; +import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase; + +public class SearchPipelineCommonYamlTestSuiteIT extends OpenSearchClientYamlSuiteTestCase { + public SearchPipelineCommonYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable parameters() throws Exception { + return OpenSearchClientYamlSuiteTestCase.createParameters(); + } +} diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/10_basic.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/10_basic.yml new file mode 100644 index 0000000000000..473d92aa18052 --- /dev/null +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/10_basic.yml @@ -0,0 +1,15 @@ +"Search pipeline common installed": + - skip: + reason: "contains is a newly added assertion" + features: contains + - do: + cluster.state: {} + + # Get cluster-manager node id + - set: { cluster_manager_node: cluster_manager } + + - do: + nodes.info: {} + + - contains: { nodes.$cluster_manager.modules: { name: search-pipeline-common } } + - contains: { nodes.$cluster_manager.search_pipelines.processors: { type: filter_query } } diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml new file mode 100644 index 0000000000000..cffb1c326462c --- /dev/null +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml @@ -0,0 +1,47 @@ +--- +teardown: + - do: + search_pipelines.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test basic pipeline crud": + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "request_processors": [ + { + "filter_query" : { + "query" : { + "term" : { + "field" : "value" + } + } + } + } + ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.description: "_description" } + + - do: + search_pipelines.get_pipeline: {} + - match: { my_pipeline.description: "_description" } + + - do: + search_pipelines.delete_pipeline: + id: "my_pipeline" + - match: { acknowledged: true } + + - do: + catch: missing + search_pipelines.get_pipeline: + id: "my_pipeline" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json new file mode 100644 index 0000000000000..475d83807b1c0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json @@ -0,0 +1,35 @@ +{ + "search_pipelines.delete_pipeline": { + "documentation": { + "description": "Deletes a search pipeline.", + "url": "https://opensearch.org/docs/latest/opensearch/rest-api/search_pipelines/" + }, + "stability": "stable", + "url": { + "paths": [ + { + "path": "/_search/pipeline/{id}", + "methods": [ + "DELETE" + ], + "parts": { + "id": { + "type": "string", + "description": "Pipeline ID" + } + } 
+ } + ] + }, + "params": { + "cluster_manager_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to cluster-manager node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json new file mode 100644 index 0000000000000..93f9cb76f9da1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json @@ -0,0 +1,37 @@ +{ + "search_pipelines.get_pipeline": { + "documentation": { + "description": "Returns a search pipeline", + "url": "https://opensearch.org/docs/latest/opensearch/rest-api/search_pipelines/" + }, + "stability": "stable", + "url": { + "paths": [ + { + "path": "/_search/pipeline", + "methods": [ + "GET" + ] + }, + { + "path": "/_search/pipeline/{id}", + "methods": [ + "GET" + ], + "parts": { + "id": { + "type": "string", + "description": "Comma-separated list of search pipeline ids. Wildcards supported." + } + } + } + ] + }, + "params": { + "cluster_manager_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to cluster-manager node" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json new file mode 100644 index 0000000000000..8bddc5bbd61af --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json @@ -0,0 +1,39 @@ +{ + "search_pipelines.put_pipeline": { + "documentation": { + "description": "Creates or updates a search pipeline.", + "url": "https://opensearch.org/docs/latest/opensearch/rest-api/search_pipelines/" + }, + "stability": "stable", + "url": { + "paths": [ + { + "path": "/_search/pipeline/{id}", + "methods": [ + "PUT" + ], + "parts": { + "id": { + "type": "string", + "description": "Pipeline ID" + } + } + } + ] + }, + "params": { + "cluster_manager_timeout": { + "type": "time", + "description": "Explicit operation timeout for connection to cluster-manager node" + }, + "timeout": { + "type": "time", + "description": "Explicit operation timeout" + } + }, + "body": { + "description": "The search pipeline definition", + "required": true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml new file mode 100644 index 0000000000000..a58d39b418901 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml @@ -0,0 +1,166 @@ +--- +"Test basic pipeline crud": + - skip: + version: " - 2.99.99" + reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "request_processors": [ + ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.description: "_description" } + + - do: + search_pipelines.delete_pipeline: + id: "my_pipeline" + - match: { acknowledged: true } + + - do: + catch: missing + search_pipelines.get_pipeline: + id: "my_pipeline" + +--- +"Test Put Versioned Pipeline": + - skip: + version: " - 2.99.99" + reason: "to be introduced in future release / TODO: change 
if/when we backport to 2.x" + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "version": 10, + "request_processors": [ ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.version: 10 } + + # Lower version + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "version": 9, + "request_processors": [ ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.version: 9 } + + # Higher version + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "version": 6789, + "request_processors": [ ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.version: 6789 } + + # No version + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "request_processors": [ ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - is_false: my_pipeline.version + + # Coming back with a version + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "version": 5385, + "request_processors": [ ] + } + - match: { acknowledged: true } + + - do: + search_pipelines.get_pipeline: + id: "my_pipeline" + - match: { my_pipeline.version: 5385 } + + # Able to delete the versioned pipeline + - do: + search_pipelines.delete_pipeline: + id: "my_pipeline" + - match: { acknowledged: true } + + - do: + catch: missing + search_pipelines.get_pipeline: + id: "my_pipeline" +--- +"Test Get All Pipelines": + - skip: + version: " - 2.99.99" + reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" + - do: + search_pipelines.put_pipeline: + id: "first_pipeline" + body: > + { + "description": "first", + "request_processors": [] + } + - do: + search_pipelines.put_pipeline: + id: "second_pipeline" + body: > + { + "description": "second", + "request_processors": [] + } + + - do: + search_pipelines.get_pipeline: {} + - match: { first_pipeline.description: "first" } + - match: { second_pipeline.description: "second" } + +--- +"Test invalid config": + - skip: + version: " - 2.99.99" + reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" + - do: + catch: /parse_exception/ + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "request_processors": [], + "invalid_field" : {} + } diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java index 2cb11a0586c98..7d9e147485058 100644 --- a/server/src/main/java/org/opensearch/action/ActionModule.java +++ b/server/src/main/java/org/opensearch/action/ActionModule.java @@ -252,8 +252,14 @@ import org.opensearch.action.search.ClearScrollAction; import org.opensearch.action.search.CreatePitAction; import org.opensearch.action.search.DeletePitAction; +import org.opensearch.action.search.DeleteSearchPipelineAction; +import org.opensearch.action.search.DeleteSearchPipelineTransportAction; +import org.opensearch.action.search.GetSearchPipelineAction; +import org.opensearch.action.search.GetSearchPipelineTransportAction; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.GetAllPitsAction; +import org.opensearch.action.search.PutSearchPipelineAction; +import org.opensearch.action.search.PutSearchPipelineTransportAction; 
import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchScrollAction; import org.opensearch.action.search.TransportClearScrollAction; @@ -434,9 +440,12 @@ import org.opensearch.rest.action.search.RestCountAction; import org.opensearch.rest.action.search.RestCreatePitAction; import org.opensearch.rest.action.search.RestDeletePitAction; +import org.opensearch.rest.action.search.RestDeleteSearchPipelineAction; import org.opensearch.rest.action.search.RestExplainAction; import org.opensearch.rest.action.search.RestGetAllPitsAction; +import org.opensearch.rest.action.search.RestGetSearchPipelineAction; import org.opensearch.rest.action.search.RestMultiSearchAction; +import org.opensearch.rest.action.search.RestPutSearchPipelineAction; import org.opensearch.rest.action.search.RestSearchAction; import org.opensearch.rest.action.search.RestSearchScrollAction; import org.opensearch.tasks.Task; @@ -719,6 +728,11 @@ public void reg actions.register(GetDecommissionStateAction.INSTANCE, TransportGetDecommissionStateAction.class); actions.register(DeleteDecommissionStateAction.INSTANCE, TransportDeleteDecommissionStateAction.class); + // Search Pipelines + actions.register(PutSearchPipelineAction.INSTANCE, PutSearchPipelineTransportAction.class); + actions.register(GetSearchPipelineAction.INSTANCE, GetSearchPipelineTransportAction.class); + actions.register(DeleteSearchPipelineAction.INSTANCE, DeleteSearchPipelineTransportAction.class); + return unmodifiableMap(actions.getRegistry()); } @@ -903,6 +917,11 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestPitSegmentsAction(nodesInCluster)); registerHandler.accept(new RestDeleteDecommissionStateAction()); + // Search pipelines API + registerHandler.accept(new RestPutSearchPipelineAction()); + registerHandler.accept(new RestGetSearchPipelineAction()); + registerHandler.accept(new RestDeleteSearchPipelineAction()); + for (ActionPlugin plugin : actionPlugins) { for (RestHandler handler : plugin.getRestHandlers( settings, diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java index fc54ecf795a1d..0e263b69a69df 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java @@ -48,6 +48,7 @@ import org.opensearch.monitor.process.ProcessInfo; import org.opensearch.node.ReportingService; import org.opensearch.search.aggregations.support.AggregationInfo; +import org.opensearch.search.pipeline.SearchPipelinesInfo; import org.opensearch.threadpool.ThreadPoolInfo; import org.opensearch.transport.TransportInfo; @@ -99,6 +100,9 @@ public NodeInfo(StreamInput in) throws IOException { addInfoIfNonNull(PluginsAndModules.class, in.readOptionalWriteable(PluginsAndModules::new)); addInfoIfNonNull(IngestInfo.class, in.readOptionalWriteable(IngestInfo::new)); addInfoIfNonNull(AggregationInfo.class, in.readOptionalWriteable(AggregationInfo::new)); + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { // TODO: Change if/when we backport to 2.x + addInfoIfNonNull(SearchPipelinesInfo.class, in.readOptionalWriteable(SearchPipelinesInfo::new)); + } } public NodeInfo( @@ -115,7 +119,8 @@ public NodeInfo( @Nullable PluginsAndModules plugins, @Nullable IngestInfo ingest, @Nullable AggregationInfo aggsInfo, - @Nullable ByteSizeValue totalIndexingBuffer + @Nullable ByteSizeValue 
totalIndexingBuffer, + @Nullable SearchPipelinesInfo searchPipelinesInfo ) { super(node); this.version = version; @@ -130,6 +135,7 @@ public NodeInfo( addInfoIfNonNull(PluginsAndModules.class, plugins); addInfoIfNonNull(IngestInfo.class, ingest); addInfoIfNonNull(AggregationInfo.class, aggsInfo); + addInfoIfNonNull(SearchPipelinesInfo.class, searchPipelinesInfo); this.totalIndexingBuffer = totalIndexingBuffer; } @@ -218,5 +224,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(getInfo(PluginsAndModules.class)); out.writeOptionalWriteable(getInfo(IngestInfo.class)); out.writeOptionalWriteable(getInfo(AggregationInfo.class)); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { // TODO: Change if/when we backport to 2.x + out.writeOptionalWriteable(getInfo(SearchPipelinesInfo.class)); + } } } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java index 77ffd98513698..a078199adb64a 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -168,7 +168,8 @@ public enum Metric { PLUGINS("plugins"), INGEST("ingest"), AGGREGATIONS("aggregations"), - INDICES("indices"); + INDICES("indices"), + SEARCH_PIPELINES("search_pipelines"); private String metricName; diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java index 475602e6b328f..93ecbac1bdcd9 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -49,6 +49,7 @@ import org.opensearch.monitor.os.OsInfo; import org.opensearch.monitor.process.ProcessInfo; import org.opensearch.search.aggregations.support.AggregationInfo; +import org.opensearch.search.pipeline.SearchPipelinesInfo; import org.opensearch.threadpool.ThreadPoolInfo; import org.opensearch.transport.TransportInfo; @@ -147,6 +148,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (nodeInfo.getInfo(AggregationInfo.class) != null) { nodeInfo.getInfo(AggregationInfo.class).toXContent(builder, params); } + if (nodeInfo.getInfo(SearchPipelinesInfo.class) != null) { + nodeInfo.getInfo(SearchPipelinesInfo.class).toXContent(builder, params); + } builder.endObject(); } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index ee7b287b878e7..0bfe24f5b47df 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -117,7 +117,8 @@ protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) { metrics.contains(NodesInfoRequest.Metric.PLUGINS.metricName()), metrics.contains(NodesInfoRequest.Metric.INGEST.metricName()), metrics.contains(NodesInfoRequest.Metric.AGGREGATIONS.metricName()), - metrics.contains(NodesInfoRequest.Metric.INDICES.metricName()) + metrics.contains(NodesInfoRequest.Metric.INDICES.metricName()), + 
metrics.contains(NodesInfoRequest.Metric.SEARCH_PIPELINES.metricName()) ); } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 2fdaa46de01bc..26332f762bdf2 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -145,7 +145,7 @@ protected ClusterStatsNodeResponse newNodeResponse(StreamInput in) throws IOExce @Override protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) { - NodeInfo nodeInfo = nodeService.info(true, true, false, true, false, true, false, true, false, false, false); + NodeInfo nodeInfo = nodeService.info(true, true, false, true, false, true, false, true, false, false, false, false); NodeStats nodeStats = nodeService.stats( CommonStatsFlags.NONE, true, diff --git a/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java index b958c3ee4f0fb..6890252cec014 100644 --- a/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java +++ b/server/src/main/java/org/opensearch/action/ingest/GetPipelineResponse.java @@ -146,7 +146,12 @@ public boolean equals(Object other) { GetPipelineResponse otherResponse = (GetPipelineResponse) other; if (pipelines == null) { return otherResponse.pipelines == null; + } else if (otherResponse.pipelines == null) { + return false; } else { + if (otherResponse.pipelines.size() != pipelines.size()) { + return false; + } // We need a map here because order does not matter for equality Map otherPipelineMap = new HashMap<>(); for (PipelineConfiguration pipeline : otherResponse.pipelines) { diff --git a/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineAction.java b/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineAction.java new file mode 100644 index 0000000000000..65f8cf3de9506 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineAction.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
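With the SEARCH_PIPELINES metric plumbed through NodeInfo, NodesInfoRequest/Response, and TransportNodesInfoAction above, each node reports its registered search processor types. A rough sketch of the relevant fragment of a GET /_nodes/search_pipelines response, with the field layout inferred from the 10_basic.yml assertion earlier in this patch (node id is a placeholder; unrelated node fields omitted):

 {
   "nodes": {
     "<node_id>": {
       "search_pipelines": {
         "processors": [
           { "type": "filter_query" }
         ]
       }
     }
   }
 }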
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Action type to delete a search pipeline + * + * @opensearch.internal + */ +public class DeleteSearchPipelineAction extends ActionType { + public static final DeleteSearchPipelineAction INSTANCE = new DeleteSearchPipelineAction(); + public static final String NAME = "cluster:admin/search/pipeline/delete"; + + public DeleteSearchPipelineAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineRequest.java b/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineRequest.java new file mode 100644 index 0000000000000..b6ba0bee87932 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineRequest.java @@ -0,0 +1,56 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.support.master.AcknowledgedRequest; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request to delete a search pipeline + * + * @opensearch.internal + */ +public class DeleteSearchPipelineRequest extends AcknowledgedRequest { + private String id; + + public DeleteSearchPipelineRequest(String id) { + this.id = Objects.requireNonNull(id); + } + + public DeleteSearchPipelineRequest() {} + + public DeleteSearchPipelineRequest(StreamInput in) throws IOException { + super(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineTransportAction.java b/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineTransportAction.java new file mode 100644 index 0000000000000..7af687663833f --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/DeleteSearchPipelineTransportAction.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.block.ClusterBlockException; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.search.pipeline.SearchPipelineService; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; + +/** + * Perform the action of deleting a search pipeline + * + * @opensearch.internal + */ +public class DeleteSearchPipelineTransportAction extends TransportClusterManagerNodeAction< + DeleteSearchPipelineRequest, + AcknowledgedResponse> { + private final SearchPipelineService searchPipelineService; + + @Inject + public DeleteSearchPipelineTransportAction( + ThreadPool threadPool, + SearchPipelineService searchPipelineService, + TransportService transportService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + DeleteSearchPipelineAction.NAME, + transportService, + searchPipelineService.getClusterService(), + threadPool, + actionFilters, + DeleteSearchPipelineRequest::new, + indexNameExpressionResolver + ); + this.searchPipelineService = searchPipelineService; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse read(StreamInput in) throws IOException { + return new AcknowledgedResponse(in); + } + + @Override + protected void clusterManagerOperation( + DeleteSearchPipelineRequest request, + ClusterState state, + ActionListener listener + ) throws Exception { + searchPipelineService.deletePipeline(request, listener); + } + + @Override + protected ClusterBlockException checkBlock(DeleteSearchPipelineRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineAction.java b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineAction.java new file mode 100644 index 0000000000000..b80ffa781b0ec --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineAction.java @@ -0,0 +1,25 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionType; + +/** + * Action type to get search pipelines + * + * @opensearch.internal + */ +public class GetSearchPipelineAction extends ActionType { + public static final GetSearchPipelineAction INSTANCE = new GetSearchPipelineAction(); + public static final String NAME = "cluster:admin/search/pipeline/get"; + + public GetSearchPipelineAction() { + super(NAME, GetSearchPipelineResponse::new); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineRequest.java b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineRequest.java new file mode 100644 index 0000000000000..4a1d3eaba87a0 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineRequest.java @@ -0,0 +1,55 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.support.clustermanager.ClusterManagerNodeReadRequest; +import org.opensearch.common.Strings; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request to get search pipelines + * + * @opensearch.internal + */ +public class GetSearchPipelineRequest extends ClusterManagerNodeReadRequest { + private final String[] ids; + + public GetSearchPipelineRequest(String... ids) { + this.ids = Objects.requireNonNull(ids); + } + + public GetSearchPipelineRequest() { + ids = Strings.EMPTY_ARRAY; + } + + public GetSearchPipelineRequest(StreamInput in) throws IOException { + super(in); + ids = in.readStringArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(ids); + } + + public String[] getIds() { + return ids; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java new file mode 100644 index 0000000000000..4fd86febcb0ae --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineResponse.java @@ -0,0 +1,144 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionResponse; +import org.opensearch.common.Strings; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.StatusToXContentObject; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.pipeline.PipelineConfiguration; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +/** + * transport response for getting a search pipeline + * + * @opensearch.internal + */ +public class GetSearchPipelineResponse extends ActionResponse implements StatusToXContentObject { + + private final List pipelines; + + public GetSearchPipelineResponse(StreamInput in) throws IOException { + super(in); + int size = in.readVInt(); + pipelines = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + pipelines.add(PipelineConfiguration.readFrom(in)); + } + + } + + public GetSearchPipelineResponse(List pipelines) { + this.pipelines = pipelines; + } + + public List pipelines() { + return Collections.unmodifiableList(pipelines); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + for (PipelineConfiguration pipeline : pipelines) { + builder.field(pipeline.getId(), pipeline.getConfigAsMap()); + } + builder.endObject(); + return builder; + } + + /** + * + * @param parser the parser for the XContent that contains the serialized GetPipelineResponse. + * @return an instance of GetPipelineResponse read from the parser + * @throws IOException If the parsing fails + */ + public static GetSearchPipelineResponse fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + List pipelines = new ArrayList<>(); + while (parser.nextToken().equals(XContentParser.Token.FIELD_NAME)) { + String pipelineId = parser.currentName(); + parser.nextToken(); + try (XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent())) { + contentBuilder.generator().copyCurrentStructure(parser); + PipelineConfiguration pipeline = new PipelineConfiguration( + pipelineId, + BytesReference.bytes(contentBuilder), + contentBuilder.contentType() + ); + pipelines.add(pipeline); + } + } + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser); + return new GetSearchPipelineResponse(pipelines); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(pipelines.size()); + for (PipelineConfiguration pipeline : pipelines) { + pipeline.writeTo(out); + } + } + + public boolean isFound() { + return !pipelines.isEmpty(); + } + + @Override + public RestStatus status() { + return isFound() ? 
RestStatus.OK : RestStatus.NOT_FOUND; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetSearchPipelineResponse otherResponse = (GetSearchPipelineResponse) o; + if (pipelines == null) { + return otherResponse.pipelines == null; + } else if (otherResponse.pipelines == null) { + return false; + } + // Convert to a map to ignore order; + return toMap(pipelines).equals(toMap(otherResponse.pipelines)); + } + + private static Map toMap(List pipelines) { + return pipelines.stream().collect(Collectors.toMap(PipelineConfiguration::getId, p -> p)); + } + + @Override + public String toString() { + return Strings.toString(XContentType.JSON, this); + } + + @Override + public int hashCode() { + int result = 1; + for (PipelineConfiguration pipeline : pipelines) { + // We only take the sum here to ensure that the order does not matter. + result += (pipeline == null ? 0 : pipeline.hashCode()); + } + return result; + } +} diff --git a/server/src/main/java/org/opensearch/action/search/GetSearchPipelineTransportAction.java b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineTransportAction.java new file mode 100644 index 0000000000000..690990a0c4151 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/GetSearchPipelineTransportAction.java @@ -0,0 +1,78 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionListener; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeReadAction; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.block.ClusterBlockException; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.search.pipeline.SearchPipelineService; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; + +/** + * Perform the action of getting a search pipeline + * + * @opensearch.internal + */ +public class GetSearchPipelineTransportAction extends TransportClusterManagerNodeReadAction< + GetSearchPipelineRequest, + GetSearchPipelineResponse> { + + @Inject + public GetSearchPipelineTransportAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver + ) { + super( + GetSearchPipelineAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + GetSearchPipelineRequest::new, + indexNameExpressionResolver + ); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected GetSearchPipelineResponse read(StreamInput in) throws IOException { + return new GetSearchPipelineResponse(in); + } + + @Override + protected void clusterManagerOperation( + GetSearchPipelineRequest request, + ClusterState state, + ActionListener listener + ) throws Exception { + listener.onResponse(new 
GetSearchPipelineResponse(SearchPipelineService.getPipelines(state, request.getIds()))); + } + + @Override + protected ClusterBlockException checkBlock(GetSearchPipelineRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineAction.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineAction.java new file mode 100644 index 0000000000000..798c8211ee505 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineAction.java @@ -0,0 +1,26 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Action type to put a new search pipeline + * + * @opensearch.internal + */ +public class PutSearchPipelineAction extends ActionType { + public static final PutSearchPipelineAction INSTANCE = new PutSearchPipelineAction(); + public static final String NAME = "cluster:admin/search/pipeline/put"; + + public PutSearchPipelineAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java new file mode 100644 index 0000000000000..178643641aa14 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineRequest.java @@ -0,0 +1,87 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.action.support.master.AcknowledgedRequest; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Request to put a search pipeline + * + * @opensearch.internal + */ +public class PutSearchPipelineRequest extends AcknowledgedRequest implements ToXContentObject { + private String id; + private BytesReference source; + private XContentType xContentType; + + public PutSearchPipelineRequest(String id, BytesReference source, MediaType mediaType) { + this.id = Objects.requireNonNull(id); + this.source = Objects.requireNonNull(source); + if (mediaType instanceof XContentType == false) { + throw new IllegalArgumentException( + PutSearchPipelineRequest.class.getSimpleName() + " found unsupported media type [" + mediaType.getClass().getName() + "]" + ); + } + this.xContentType = XContentType.fromMediaType(Objects.requireNonNull(mediaType)); + } + + public PutSearchPipelineRequest(StreamInput in) throws IOException { + super(in); + id = in.readString(); + source = in.readBytesReference(); + xContentType = in.readEnum(XContentType.class); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String getId() { + return id; + } + + public BytesReference getSource() { + return source; + } + + public XContentType getXContentType() { + return xContentType; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBytesReference(source); + out.writeEnum(xContentType); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (source != null) { + builder.rawValue(source.streamInput(), xContentType); + } else { + builder.startObject().endObject(); + } + return builder; + } + +} diff --git a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java new file mode 100644 index 0000000000000..342a1f4cb26c6 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java @@ -0,0 +1,99 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
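A rough usage sketch tying PutSearchPipelineRequest above to the get/delete requests and the ClusterAdminClient methods added further down in this change. The pipeline id and JSON body are illustrative placeholders, not part of the patch.

    import org.opensearch.action.search.DeleteSearchPipelineRequest;
    import org.opensearch.action.search.GetSearchPipelineRequest;
    import org.opensearch.action.search.GetSearchPipelineResponse;
    import org.opensearch.action.search.PutSearchPipelineRequest;
    import org.opensearch.client.Client;
    import org.opensearch.common.bytes.BytesArray;
    import org.opensearch.common.xcontent.XContentType;

    public class SearchPipelineCrudExample {
        public static GetSearchPipelineResponse roundTrip(Client client) {
            // Store a pipeline with empty processor lists; the body keys mirror
            // request_processors/response_processors read by Pipeline.create.
            PutSearchPipelineRequest put = new PutSearchPipelineRequest(
                "my_pipeline",
                new BytesArray("{\"description\":\"example\",\"request_processors\":[],\"response_processors\":[]}"),
                XContentType.JSON
            );
            client.admin().cluster().putSearchPipeline(put).actionGet();

            // Fetch it back by id.
            GetSearchPipelineResponse got = client.admin().cluster()
                .getSearchPipeline(new GetSearchPipelineRequest("my_pipeline"))
                .actionGet();

            // Delete it again.
            client.admin().cluster().deleteSearchPipeline(new DeleteSearchPipelineRequest("my_pipeline")).actionGet();
            return got;
        }
    }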
+ */ + +package org.opensearch.action.search; + +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.clustermanager.TransportClusterManagerNodeAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.OriginSettingClient; +import org.opensearch.client.node.NodeClient; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.block.ClusterBlockException; +import org.opensearch.cluster.block.ClusterBlockLevel; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.search.pipeline.SearchPipelineService; +import org.opensearch.search.pipeline.SearchPipelinesInfo; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.opensearch.search.pipeline.SearchPipelineService.SEARCH_PIPELINE_ORIGIN; + +/** + * Perform the action of putting a search pipeline + * + * @opensearch.internal + */ +public class PutSearchPipelineTransportAction extends TransportClusterManagerNodeAction { + + private final SearchPipelineService searchPipelineService; + private final OriginSettingClient client; + + @Inject + public PutSearchPipelineTransportAction( + ThreadPool threadPool, + TransportService transportService, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + SearchPipelineService searchPipelineService, + NodeClient client + ) { + super( + PutSearchPipelineAction.NAME, + transportService, + searchPipelineService.getClusterService(), + threadPool, + actionFilters, + PutSearchPipelineRequest::new, + indexNameExpressionResolver + ); + this.client = new OriginSettingClient(client, SEARCH_PIPELINE_ORIGIN); + this.searchPipelineService = searchPipelineService; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse read(StreamInput in) throws IOException { + return new AcknowledgedResponse(in); + } + + @Override + protected void clusterManagerOperation( + PutSearchPipelineRequest request, + ClusterState state, + ActionListener listener + ) throws Exception { + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); + client.admin().cluster().nodesInfo(nodesInfoRequest, ActionListener.wrap(nodeInfos -> { + Map searchPipelinesInfos = new HashMap<>(); + for (NodeInfo nodeInfo : nodeInfos.getNodes()) { + searchPipelinesInfos.put(nodeInfo.getNode(), nodeInfo.getInfo(SearchPipelinesInfo.class)); + } + searchPipelineService.putPipeline(searchPipelinesInfos, request, listener); + }, listener::onFailure)); + } + + @Override + protected ClusterBlockException checkBlock(PutSearchPipelineRequest request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequest.java b/server/src/main/java/org/opensearch/action/search/SearchRequest.java index 21448bf9d7e94..40a59d81d6d69 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequest.java +++ 
b/server/src/main/java/org/opensearch/action/search/SearchRequest.java @@ -115,6 +115,8 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla private TimeValue cancelAfterTimeInterval; + private String pipeline; + public SearchRequest() { this.localClusterAlias = null; this.absoluteStartMillis = DEFAULT_ABSOLUTE_START_MILLIS; @@ -248,6 +250,7 @@ public SearchRequest(StreamInput in) throws IOException { } ccsMinimizeRoundtrips = in.readBoolean(); cancelAfterTimeInterval = in.readOptionalTimeValue(); + pipeline = in.readOptionalString(); } @Override @@ -276,6 +279,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeBoolean(ccsMinimizeRoundtrips); out.writeOptionalTimeValue(cancelAfterTimeInterval); + out.writeOptionalString(pipeline); } @Override @@ -654,6 +658,15 @@ public TimeValue getCancelAfterTimeInterval() { return cancelAfterTimeInterval; } + public SearchRequest pipeline(String pipeline) { + this.pipeline = pipeline; + return this; + } + + public String pipeline() { + return pipeline; + } + @Override public SearchTask createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { return new SearchTask(id, type, action, this::buildDescription, parentTaskId, headers, cancelAfterTimeInterval); @@ -700,7 +713,8 @@ public boolean equals(Object o) { && Objects.equals(localClusterAlias, that.localClusterAlias) && absoluteStartMillis == that.absoluteStartMillis && ccsMinimizeRoundtrips == that.ccsMinimizeRoundtrips - && Objects.equals(cancelAfterTimeInterval, that.cancelAfterTimeInterval); + && Objects.equals(cancelAfterTimeInterval, that.cancelAfterTimeInterval) + && Objects.equals(pipeline, that.pipeline); } @Override @@ -762,6 +776,8 @@ public String toString() { + source + ", cancelAfterTimeInterval=" + cancelAfterTimeInterval + + ", pipeline=" + + pipeline + "}"; } } diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java index 1ca477942cdf6..221022fcfea80 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java @@ -80,6 +80,7 @@ import org.opensearch.search.internal.AliasFilter; import org.opensearch.search.internal.InternalSearchResponse; import org.opensearch.search.internal.SearchContext; +import org.opensearch.search.pipeline.SearchPipelineService; import org.opensearch.search.profile.ProfileShardResult; import org.opensearch.search.profile.SearchProfileShardResults; import org.opensearch.tasks.CancellableTask; @@ -153,6 +154,7 @@ public class TransportSearchAction extends HandledTransportAction) SearchRequest::new); this.client = client; @@ -180,6 +183,7 @@ public TransportSearchAction( this.searchService = searchService; this.indexNameExpressionResolver = indexNameExpressionResolver; this.namedWriteableRegistry = namedWriteableRegistry; + this.searchPipelineService = searchPipelineService; } private Map buildPerIndexAliasFilter( @@ -375,16 +379,29 @@ boolean buildPointInTimeFromSearchResults() { private void executeRequest( Task task, - SearchRequest searchRequest, + SearchRequest originalSearchRequest, SearchAsyncActionProvider searchAsyncActionProvider, - ActionListener listener + ActionListener originalListener ) { final long relativeStartNanos = System.nanoTime(); final SearchTimeProvider timeProvider = new SearchTimeProvider( - searchRequest.getOrCreateAbsoluteStartMillis(), + 
originalSearchRequest.getOrCreateAbsoluteStartMillis(), relativeStartNanos, System::nanoTime ); + SearchRequest searchRequest; + try { + searchRequest = searchPipelineService.transformRequest(originalSearchRequest); + } catch (Exception e) { + originalListener.onFailure(e); + throw new RuntimeException(e); + } + ActionListener listener = ActionListener.wrap( + // TODO: Should we transform responses with the original request or the transformed request? Or both? + r -> originalListener.onResponse(searchPipelineService.transformResponse(originalSearchRequest, r)), + originalListener::onFailure + ); + ActionListener rewriteListener = ActionListener.wrap(source -> { if (source != searchRequest.source()) { // only set it if it changed - we don't allow null values to be set but it might be already null. this way we catch diff --git a/server/src/main/java/org/opensearch/client/ClusterAdminClient.java b/server/src/main/java/org/opensearch/client/ClusterAdminClient.java index 4ab438ec064f1..39aa1ed80d0d7 100644 --- a/server/src/main/java/org/opensearch/client/ClusterAdminClient.java +++ b/server/src/main/java/org/opensearch/client/ClusterAdminClient.java @@ -150,6 +150,10 @@ import org.opensearch.action.ingest.SimulatePipelineRequest; import org.opensearch.action.ingest.SimulatePipelineRequestBuilder; import org.opensearch.action.ingest.SimulatePipelineResponse; +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineResponse; +import org.opensearch.action.search.PutSearchPipelineRequest; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentType; @@ -899,4 +903,34 @@ public interface ClusterAdminClient extends OpenSearchClient { * Deletes the decommission metadata. 
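The executeRequest wiring above runs the named pipeline's request processors before the search executes and its response processors on the way back. A minimal sketch of how a caller might name a pipeline on a request; the index name and pipeline id are placeholders.

    import org.opensearch.action.ActionListener;
    import org.opensearch.action.search.SearchRequest;
    import org.opensearch.action.search.SearchResponse;
    import org.opensearch.client.Client;
    import org.opensearch.index.query.QueryBuilders;
    import org.opensearch.search.builder.SearchSourceBuilder;

    public class PipelineSearchExample {
        public static void search(Client client) {
            SearchRequest request = new SearchRequest("my-index")
                .source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))
                .pipeline("my_pipeline"); // resolved by SearchPipelineService.transformRequest above
            client.search(request, ActionListener.wrap(
                (SearchResponse response) -> {
                    // By this point the response has passed through the pipeline's response processors.
                },
                e -> { /* handle failure */ }
            ));
        }
    }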
*/ DeleteDecommissionStateRequestBuilder prepareDeleteDecommissionRequest(); + + /** + * Stores a search pipeline + */ + void putSearchPipeline(PutSearchPipelineRequest request, ActionListener listener); + + /** + * Stores a search pipeline + */ + ActionFuture putSearchPipeline(PutSearchPipelineRequest request); + + /** + * Returns a stored search pipeline + */ + void getSearchPipeline(GetSearchPipelineRequest request, ActionListener listener); + + /** + * Returns a stored search pipeline + */ + ActionFuture getSearchPipeline(GetSearchPipelineRequest request); + + /** + * Deletes a stored search pipeline + */ + void deleteSearchPipeline(DeleteSearchPipelineRequest request, ActionListener listener); + + /** + * Deletes a stored search pipeline + */ + ActionFuture deleteSearchPipeline(DeleteSearchPipelineRequest request); } diff --git a/server/src/main/java/org/opensearch/client/support/AbstractClient.java b/server/src/main/java/org/opensearch/client/support/AbstractClient.java index 261709394e531..6a38254a03e71 100644 --- a/server/src/main/java/org/opensearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/opensearch/client/support/AbstractClient.java @@ -363,13 +363,20 @@ import org.opensearch.action.search.DeletePitAction; import org.opensearch.action.search.DeletePitRequest; import org.opensearch.action.search.DeletePitResponse; +import org.opensearch.action.search.DeleteSearchPipelineAction; +import org.opensearch.action.search.DeleteSearchPipelineRequest; import org.opensearch.action.search.GetAllPitNodesRequest; import org.opensearch.action.search.GetAllPitNodesResponse; +import org.opensearch.action.search.GetSearchPipelineAction; +import org.opensearch.action.search.GetSearchPipelineRequest; +import org.opensearch.action.search.GetSearchPipelineResponse; import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchRequestBuilder; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.action.search.GetAllPitsAction; +import org.opensearch.action.search.PutSearchPipelineAction; +import org.opensearch.action.search.PutSearchPipelineRequest; import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchRequestBuilder; @@ -1452,6 +1459,36 @@ public void deleteDecommissionState( public DeleteDecommissionStateRequestBuilder prepareDeleteDecommissionRequest() { return new DeleteDecommissionStateRequestBuilder(this, DeleteDecommissionStateAction.INSTANCE); } + + @Override + public void putSearchPipeline(PutSearchPipelineRequest request, ActionListener listener) { + execute(PutSearchPipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture putSearchPipeline(PutSearchPipelineRequest request) { + return execute(PutSearchPipelineAction.INSTANCE, request); + } + + @Override + public void getSearchPipeline(GetSearchPipelineRequest request, ActionListener listener) { + execute(GetSearchPipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture getSearchPipeline(GetSearchPipelineRequest request) { + return execute(GetSearchPipelineAction.INSTANCE, request); + } + + @Override + public void deleteSearchPipeline(DeleteSearchPipelineRequest request, ActionListener listener) { + execute(DeleteSearchPipelineAction.INSTANCE, request, listener); + } + + @Override + public ActionFuture deleteSearchPipeline(DeleteSearchPipelineRequest 
request) { + return execute(DeleteSearchPipelineAction.INSTANCE, request); + } } static class IndicesAdmin implements IndicesAdminClient { diff --git a/server/src/main/java/org/opensearch/cluster/ClusterModule.java b/server/src/main/java/org/opensearch/cluster/ClusterModule.java index a33bd7e8057ed..28961d24c111a 100644 --- a/server/src/main/java/org/opensearch/cluster/ClusterModule.java +++ b/server/src/main/java/org/opensearch/cluster/ClusterModule.java @@ -96,6 +96,7 @@ import org.opensearch.persistent.PersistentTasksNodeService; import org.opensearch.plugins.ClusterPlugin; import org.opensearch.script.ScriptMetadata; +import org.opensearch.search.pipeline.SearchPipelineMetadata; import org.opensearch.snapshots.SnapshotsInfoService; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskResultsService; @@ -173,6 +174,7 @@ public static List getNamedWriteables() { // Metadata registerMetadataCustom(entries, RepositoriesMetadata.TYPE, RepositoriesMetadata::new, RepositoriesMetadata::readDiffFrom); registerMetadataCustom(entries, IngestMetadata.TYPE, IngestMetadata::new, IngestMetadata::readDiffFrom); + registerMetadataCustom(entries, SearchPipelineMetadata.TYPE, SearchPipelineMetadata::new, SearchPipelineMetadata::readDiffFrom); registerMetadataCustom(entries, ScriptMetadata.TYPE, ScriptMetadata::new, ScriptMetadata::readDiffFrom); registerMetadataCustom(entries, IndexGraveyard.TYPE, IndexGraveyard::new, IndexGraveyard::readDiffFrom); registerMetadataCustom( @@ -250,6 +252,13 @@ public static List getNamedXWriteables() { entries.add( new NamedXContentRegistry.Entry(Metadata.Custom.class, new ParseField(IngestMetadata.TYPE), IngestMetadata::fromXContent) ); + entries.add( + new NamedXContentRegistry.Entry( + Metadata.Custom.class, + new ParseField(SearchPipelineMetadata.TYPE), + SearchPipelineMetadata::fromXContent + ) + ); entries.add( new NamedXContentRegistry.Entry(Metadata.Custom.class, new ParseField(ScriptMetadata.TYPE), ScriptMetadata::fromXContent) ); diff --git a/server/src/main/java/org/opensearch/cluster/service/ClusterManagerTaskKeys.java b/server/src/main/java/org/opensearch/cluster/service/ClusterManagerTaskKeys.java index 0743997c23c9a..c88bea56cb9bd 100644 --- a/server/src/main/java/org/opensearch/cluster/service/ClusterManagerTaskKeys.java +++ b/server/src/main/java/org/opensearch/cluster/service/ClusterManagerTaskKeys.java @@ -32,6 +32,9 @@ public final class ClusterManagerTaskKeys { public static final String REMOVE_INDEX_TEMPLATE_V2_KEY = "remove-index-template-v2"; public static final String PUT_PIPELINE_KEY = "put-pipeline"; public static final String DELETE_PIPELINE_KEY = "delete-pipeline"; + + public static final String PUT_SEARCH_PIPELINE_KEY = "put-search-pipeline"; + public static final String DELETE_SEARCH_PIPELINE_KEY = "delete-search-pipeline"; public static final String CREATE_PERSISTENT_TASK_KEY = "create-persistent-task"; public static final String FINISH_PERSISTENT_TASK_KEY = "finish-persistent-task"; public static final String REMOVE_PERSISTENT_TASK_KEY = "remove-persistent-task"; diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 6b7d810e9f0d9..9e2df65d984c5 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -46,8 +46,10 @@ import org.opensearch.indices.replication.SegmentReplicationSourceService; import org.opensearch.extensions.ExtensionsManager; import 
org.opensearch.extensions.NoopExtensionsManager; +import org.opensearch.plugins.SearchPipelinesPlugin; import org.opensearch.search.backpressure.SearchBackpressureService; import org.opensearch.search.backpressure.settings.SearchBackpressureSettings; +import org.opensearch.search.pipeline.SearchPipelineService; import org.opensearch.tasks.TaskResourceTrackingService; import org.opensearch.threadpool.RunnableTaskExecutionListener; import org.opensearch.index.store.RemoteSegmentStoreDirectoryFactory; @@ -541,6 +543,7 @@ protected Node( pluginsService.filterPlugins(IngestPlugin.class), client ); + final SetOnce repositoriesServiceReference = new SetOnce<>(); final ClusterInfoService clusterInfoService = newClusterInfoService(settings, clusterService, threadPool, client); final UsageService usageService = new UsageService(); @@ -950,6 +953,16 @@ protected Node( rerouteService, fsHealthService ); + final SearchPipelineService searchPipelineService = new SearchPipelineService( + clusterService, + threadPool, + this.environment, + scriptService, + analysisModule.getAnalysisRegistry(), + xContentRegistry, + pluginsService.filterPlugins(SearchPipelinesPlugin.class), + client + ); this.nodeService = new NodeService( settings, threadPool, @@ -969,7 +982,8 @@ protected Node( indexingPressureService, searchModule.getValuesSourceRegistry().getUsageService(), searchBackpressureService, - nodeEnvironment + nodeEnvironment, + searchPipelineService ); final SearchService searchService = newSearchService( @@ -1027,6 +1041,7 @@ protected Node( b.bind(ScriptService.class).toInstance(scriptService); b.bind(AnalysisRegistry.class).toInstance(analysisModule.getAnalysisRegistry()); b.bind(IngestService.class).toInstance(ingestService); + b.bind(SearchPipelineService.class).toInstance(searchPipelineService); b.bind(IndexingPressureService.class).toInstance(indexingPressureService); b.bind(TaskResourceTrackingService.class).toInstance(taskResourceTrackingService); b.bind(SearchBackpressureService.class).toInstance(searchBackpressureService); diff --git a/server/src/main/java/org/opensearch/node/NodeService.java b/server/src/main/java/org/opensearch/node/NodeService.java index b4446085243df..767e257523819 100644 --- a/server/src/main/java/org/opensearch/node/NodeService.java +++ b/server/src/main/java/org/opensearch/node/NodeService.java @@ -56,6 +56,7 @@ import org.opensearch.script.ScriptService; import org.opensearch.search.aggregations.support.AggregationUsageService; import org.opensearch.search.backpressure.SearchBackpressureService; +import org.opensearch.search.pipeline.SearchPipelineService; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -85,6 +86,7 @@ public class NodeService implements Closeable { private final IndexingPressureService indexingPressureService; private final AggregationUsageService aggregationUsageService; private final SearchBackpressureService searchBackpressureService; + private final SearchPipelineService searchPipelineService; private final ClusterService clusterService; private final Discovery discovery; private final NodeEnvironment nodeEnvironment; @@ -108,7 +110,8 @@ public class NodeService implements Closeable { IndexingPressureService indexingPressureService, AggregationUsageService aggregationUsageService, SearchBackpressureService searchBackpressureService, - NodeEnvironment nodeEnvironment + NodeEnvironment nodeEnvironment, + SearchPipelineService searchPipelineService ) { this.settings = settings; this.threadPool = 
threadPool; @@ -127,9 +130,11 @@ public class NodeService implements Closeable { this.indexingPressureService = indexingPressureService; this.aggregationUsageService = aggregationUsageService; this.searchBackpressureService = searchBackpressureService; + this.searchPipelineService = searchPipelineService; this.clusterService = clusterService; this.nodeEnvironment = nodeEnvironment; clusterService.addStateApplier(ingestService); + clusterService.addStateApplier(searchPipelineService); } public NodeInfo info( @@ -143,7 +148,8 @@ public NodeInfo info( boolean plugin, boolean ingest, boolean aggs, - boolean indices + boolean indices, + boolean searchPipelines ) { return new NodeInfo( Version.CURRENT, @@ -159,7 +165,8 @@ public NodeInfo info( plugin ? (pluginService == null ? null : pluginService.info()) : null, ingest ? (ingestService == null ? null : ingestService.info()) : null, aggs ? (aggregationUsageService == null ? null : aggregationUsageService.info()) : null, - indices ? indicesService.getTotalIndexingBufferBytes() : null + indices ? indicesService.getTotalIndexingBufferBytes() : null, + searchPipelines ? (searchPipelineService == null ? null : searchPipelineService.info()) : null ); } diff --git a/server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java b/server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java new file mode 100644 index 0000000000000..1a43fd57cd72f --- /dev/null +++ b/server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java @@ -0,0 +1,32 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugins; + +import org.opensearch.search.pipeline.Processor; + +import java.util.Collections; +import java.util.Map; + +/** + * An extension point for {@link Plugin} implementation to add custom search pipeline processors. + * + * @opensearch.api + */ +public interface SearchPipelinesPlugin { + /** + * Returns additional search pipeline processor types added by this plugin. + * + * The key of the returned {@link Map} is the unique name for the processor which is specified + * in pipeline configurations, and the value is a {@link org.opensearch.search.pipeline.Processor.Factory} + * to create the processor from a given pipeline configuration. + */ + default Map getProcessors(Processor.Parameters parameters) { + return Collections.emptyMap(); + } +} diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestDeleteSearchPipelineAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestDeleteSearchPipelineAction.java new file mode 100644 index 0000000000000..cb89a5276fae1 --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/action/search/RestDeleteSearchPipelineAction.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
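A hedged sketch of how a plugin might use the SearchPipelinesPlugin extension point above. ExampleRequestProcessor is hypothetical (a matching sketch appears after the Processor interface further down), and the registered type name is made up.

    import java.util.Map;

    import org.opensearch.plugins.Plugin;
    import org.opensearch.plugins.SearchPipelinesPlugin;
    import org.opensearch.search.pipeline.Processor;

    public class ExampleSearchPipelinesPlugin extends Plugin implements SearchPipelinesPlugin {
        @Override
        public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
            // The map key is the processor type name used inside pipeline definitions.
            return Map.of(ExampleRequestProcessor.TYPE, new ExampleRequestProcessor.Factory());
        }
    }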
+ */ + +package org.opensearch.rest.action.search; + +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.DELETE; + +/** + * REST action to delete a search pipeline + * + * @opensearch.internal + */ +public class RestDeleteSearchPipelineAction extends BaseRestHandler { + @Override + public String getName() { + return "search_delete_pipeline_action"; + } + + @Override + public List routes() { + return List.of(new Route(DELETE, "/_search/pipeline/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + DeleteSearchPipelineRequest request = new DeleteSearchPipelineRequest(restRequest.param("id")); + request.clusterManagerNodeTimeout(restRequest.paramAsTime("cluster_manager_timeout", request.clusterManagerNodeTimeout())); + request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + return channel -> client.admin().cluster().deleteSearchPipeline(request, new RestToXContentListener<>(channel)); + } +} diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestGetSearchPipelineAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestGetSearchPipelineAction.java new file mode 100644 index 0000000000000..d5003de00ec35 --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/action/search/RestGetSearchPipelineAction.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.rest.action.search; + +import org.opensearch.action.search.GetSearchPipelineRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.Strings; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestStatusToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.GET; + +/** + * REST action to retrieve search pipelines + * + * @opensearch.internal + */ +public class RestGetSearchPipelineAction extends BaseRestHandler { + @Override + public String getName() { + return "search_get_pipeline_action"; + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_search/pipeline"), new Route(GET, "/_search/pipeline/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + GetSearchPipelineRequest request = new GetSearchPipelineRequest(Strings.splitStringByCommaToArray(restRequest.param("id"))); + request.clusterManagerNodeTimeout(restRequest.paramAsTime("cluster_manager_timeout", request.clusterManagerNodeTimeout())); + return channel -> client.admin().cluster().getSearchPipeline(request, new RestStatusToXContentListener<>(channel)); + } +} diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestPutSearchPipelineAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestPutSearchPipelineAction.java new file mode 100644 index 0000000000000..a2bb061e52c32 --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/action/search/RestPutSearchPipelineAction.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.rest.action.search; + +import org.opensearch.action.search.PutSearchPipelineRequest; +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.collect.Tuple; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.opensearch.rest.RestRequest.Method.PUT; + +/** + * REST action to put a search pipeline + * + * @opensearch.internal + */ +public class RestPutSearchPipelineAction extends BaseRestHandler { + @Override + public String getName() { + return "search_put_pipeline_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/_search/pipeline/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + Tuple sourceTuple = restRequest.contentOrSourceParam(); + PutSearchPipelineRequest request = new PutSearchPipelineRequest(restRequest.param("id"), sourceTuple.v2(), sourceTuple.v1()); + request.clusterManagerNodeTimeout(restRequest.paramAsTime("cluster_manager_timeout", request.clusterManagerNodeTimeout())); + request.timeout(restRequest.paramAsTime("timeout", request.timeout())); + return channel -> client.admin().cluster().putSearchPipeline(request, new RestToXContentListener<>(channel)); + } +} diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java index 1c6526701d91d..d624e1bf826b2 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java @@ -200,6 +200,7 @@ public static void parseSearchRequest( searchRequest.routing(request.param("routing")); searchRequest.preference(request.param("preference")); searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions())); + searchRequest.pipeline(request.param("search_pipeline")); checkRestTotalHits(request, searchRequest); diff --git a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java new file mode 100644 index 0000000000000..1b052e4a7a673 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java @@ -0,0 +1,158 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.OpenSearchParseException; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.Nullable; +import org.opensearch.ingest.ConfigurationUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import static org.opensearch.ingest.ConfigurationUtils.TAG_KEY; +import static org.opensearch.ingest.Pipeline.DESCRIPTION_KEY; +import static org.opensearch.ingest.Pipeline.VERSION_KEY; + +/** + * Concrete representation of a search pipeline, holding multiple processors. 
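Pipeline.create below reads a handful of well-known keys out of the parsed pipeline body. A hedged sketch of the map shape it appears to accept; the example_processor type and its max_size option are illustrative placeholders, while tag and description follow the keys consumed by readProcessors.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PipelineConfigShape {
        public static Map<String, Object> exampleConfig() {
            Map<String, Object> processorCfg = new HashMap<>();
            processorCfg.put("tag", "demo");
            processorCfg.put("description", "caps the page size at five hits");
            processorCfg.put("max_size", 5); // consumed by the processor's own factory

            Map<String, Object> config = new HashMap<>();
            config.put("description", "example pipeline");
            config.put("version", 1);
            config.put("request_processors", List.of(Map.of("example_processor", processorCfg)));
            config.put("response_processors", List.of());
            return config;
        }
    }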
+ */ +public class Pipeline { + + public static final String REQUEST_PROCESSORS_KEY = "request_processors"; + public static final String RESPONSE_PROCESSORS_KEY = "response_processors"; + private final String id; + private final String description; + private final Integer version; + + // TODO: Refactor org.opensearch.ingest.CompoundProcessor to implement our generic Processor interface + // Then these can be CompoundProcessors instead of lists. + private final List searchRequestProcessors; + private final List searchResponseProcessors; + + Pipeline( + String id, + @Nullable String description, + @Nullable Integer version, + List requestProcessors, + List responseProcessors + ) { + this.id = id; + this.description = description; + this.version = version; + this.searchRequestProcessors = requestProcessors; + this.searchResponseProcessors = responseProcessors; + } + + public static Pipeline create(String id, Map config, Map processorFactories) + throws Exception { + String description = ConfigurationUtils.readOptionalStringProperty(null, null, config, DESCRIPTION_KEY); + Integer version = ConfigurationUtils.readIntProperty(null, null, config, VERSION_KEY, null); + List> requestProcessorConfigs = ConfigurationUtils.readOptionalList(null, null, config, REQUEST_PROCESSORS_KEY); + List requestProcessors = readProcessors( + SearchRequestProcessor.class, + processorFactories, + requestProcessorConfigs + ); + List> responseProcessorConfigs = ConfigurationUtils.readOptionalList( + null, + null, + config, + RESPONSE_PROCESSORS_KEY + ); + List responseProcessors = readProcessors( + SearchResponseProcessor.class, + processorFactories, + responseProcessorConfigs + ); + if (config.isEmpty() == false) { + throw new OpenSearchParseException( + "pipeline [" + + id + + "] doesn't support one or more provided configuration parameters " + + Arrays.toString(config.keySet().toArray()) + ); + } + return new Pipeline(id, description, version, requestProcessors, responseProcessors); + } + + @SuppressWarnings("unchecked") // Cast is checked using isInstance + private static List readProcessors( + Class processorType, + Map processorFactories, + List> requestProcessorConfigs + ) throws Exception { + List processors = new ArrayList<>(); + if (requestProcessorConfigs == null) { + return processors; + } + for (Map processorConfigWithKey : requestProcessorConfigs) { + for (Map.Entry entry : processorConfigWithKey.entrySet()) { + String type = entry.getKey(); + if (!processorFactories.containsKey(type)) { + throw new IllegalArgumentException("Invalid processor type " + type); + } + Map config = (Map) entry.getValue(); + String tag = ConfigurationUtils.readOptionalStringProperty(null, null, config, TAG_KEY); + String description = ConfigurationUtils.readOptionalStringProperty(null, tag, config, DESCRIPTION_KEY); + Processor processor = processorFactories.get(type).create(processorFactories, tag, description, config); + if (processorType.isInstance(processor)) { + processors.add((T) processor); + } else { + throw new IllegalArgumentException("Processor type " + type + " is not a " + processorType.getSimpleName()); + } + } + } + return processors; + } + + List flattenAllProcessors() { + List allProcessors = new ArrayList<>(searchRequestProcessors.size() + searchResponseProcessors.size()); + allProcessors.addAll(searchRequestProcessors); + allProcessors.addAll(searchResponseProcessors); + return allProcessors; + } + + String getId() { + return id; + } + + String getDescription() { + return description; + } + + Integer 
getVersion() { + return version; + } + + List getSearchRequestProcessors() { + return searchRequestProcessors; + } + + List getSearchResponseProcessors() { + return searchResponseProcessors; + } + + SearchRequest transformRequest(SearchRequest request) throws Exception { + for (SearchRequestProcessor searchRequestProcessor : searchRequestProcessors) { + request = searchRequestProcessor.processRequest(request); + } + return request; + } + + SearchResponse transformResponse(SearchRequest request, SearchResponse response) throws Exception { + for (SearchResponseProcessor responseProcessor : searchResponseProcessors) { + response = responseProcessor.processResponse(request, response); + } + return response; + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java b/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java new file mode 100644 index 0000000000000..e2599fd78908c --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/PipelineConfiguration.java @@ -0,0 +1,160 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.cluster.AbstractDiffable; +import org.opensearch.cluster.Diff; +import org.opensearch.common.Strings; +import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.ContextParser; +import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +/** + * TODO: Copied verbatim from {@link org.opensearch.ingest.PipelineConfiguration}. + * + * See if we can refactor into a common class. 
I suspect not, just because this one will hold + */ +public class PipelineConfiguration extends AbstractDiffable implements ToXContentObject { + private static final ObjectParser PARSER = new ObjectParser<>( + "pipeline_config", + true, + PipelineConfiguration.Builder::new + ); + static { + PARSER.declareString(PipelineConfiguration.Builder::setId, new ParseField("id")); + PARSER.declareField((parser, builder, aVoid) -> { + XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); + contentBuilder.generator().copyCurrentStructure(parser); + builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType()); + }, new ParseField("config"), ObjectParser.ValueType.OBJECT); + + } + + public static ContextParser getParser() { + return (parser, context) -> PARSER.apply(parser, null).build(); + } + + private static class Builder { + + private String id; + private BytesReference config; + private XContentType xContentType; + + void setId(String id) { + this.id = id; + } + + void setConfig(BytesReference config, MediaType mediaType) { + if (mediaType instanceof XContentType == false) { + throw new IllegalArgumentException("PipelineConfiguration does not support media type [" + mediaType.getClass() + "]"); + } + this.config = config; + this.xContentType = XContentType.fromMediaType(mediaType); + } + + PipelineConfiguration build() { + return new PipelineConfiguration(id, config, xContentType); + } + } + + private final String id; + // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state + // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options) + // also the get pipeline api just directly returns this to the caller + private final BytesReference config; + private final XContentType xContentType; + + public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) { + this.id = Objects.requireNonNull(id); + this.config = Objects.requireNonNull(config); + this.xContentType = Objects.requireNonNull(xContentType); + } + + public PipelineConfiguration(String id, BytesReference config, MediaType mediaType) { + this(id, config, XContentType.fromMediaType(mediaType)); + } + + public String getId() { + return id; + } + + public Map getConfigAsMap() { + return XContentHelper.convertToMap(config, true, xContentType).v2(); + } + + // pkg-private for tests + XContentType getXContentType() { + return xContentType; + } + + // pkg-private for tests + BytesReference getConfig() { + return config; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("id", id); + builder.field("config", getConfigAsMap()); + builder.endObject(); + return builder; + } + + public static PipelineConfiguration readFrom(StreamInput in) throws IOException { + return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class)); + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return readDiffFrom(PipelineConfiguration::readFrom, in); + } + + @Override + public String toString() { + return Strings.toString(XContentType.JSON, this); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeBytesReference(config); + out.writeEnum(xContentType); + } + + @Override + public boolean equals(Object o) { + if (this == o) 
return true; + if (o == null || getClass() != o.getClass()) return false; + + PipelineConfiguration that = (PipelineConfiguration) o; + + if (!id.equals(that.id)) return false; + return getConfigAsMap().equals(that.getConfigAsMap()); + + } + + @Override + public int hashCode() { + int result = id.hashCode(); + result = 31 * result + getConfigAsMap().hashCode(); + return result; + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/Processor.java b/server/src/main/java/org/opensearch/search/pipeline/Processor.java new file mode 100644 index 0000000000000..245db15a771b9 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/Processor.java @@ -0,0 +1,166 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.client.Client; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.Environment; +import org.opensearch.index.analysis.AnalysisRegistry; +import org.opensearch.script.ScriptService; +import org.opensearch.threadpool.Scheduler; + +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.LongSupplier; + +/** + * A processor implementation may modify the request or response from a search call. + * Whether changes are made and what exactly is modified is up to the implementation. + *

+ * Processors may get called concurrently and thus need to be thread-safe. + * + * + * TODO: Refactor {@link org.opensearch.ingest.Processor} to extend this interface, and specialize to IngestProcessor. + * + * @opensearch.internal + */ +public interface Processor { + + /** + * Gets the type of processor + */ + String getType(); + + /** + * Gets the tag of a processor. + */ + String getTag(); + + /** + * Gets the description of a processor. + */ + String getDescription(); + + /** + * A factory that knows how to construct a processor based on a map of maps. + */ + interface Factory { + + /** + * Creates a processor based on the specified map of maps config. + * + * @param processorFactories Other processors which may be created inside this processor + * @param tag The tag for the processor + * @param description A short description of what this processor does + * @param config The configuration for the processor + * + * Note: Implementations are responsible for removing the used configuration + * keys, so that after creation the config map should be empty. + */ + Processor create(Map processorFactories, String tag, String description, Map config) + throws Exception; + } + + /** + * Infrastructure class that holds services that can be used by processor factories to create processor instances + * and that gets passed around to all {@link org.opensearch.plugins.SearchPipelinesPlugin}s. + */ + class Parameters { + + /** + * Useful to provide access to the node's environment like config directory to processor factories. + */ + public final Environment env; + + /** + * Provides processors script support. + */ + public final ScriptService scriptService; + + /** + * Provide analyzer support + */ + public final AnalysisRegistry analysisRegistry; + + /** + * Allows processors to read headers set by {@link org.opensearch.action.support.ActionFilter} + * instances that have run while handling the current search. 
+ */ + public final ThreadContext threadContext; + + public final LongSupplier relativeTimeSupplier; + + public final SearchPipelineService searchPipelineService; + + public final Consumer genericExecutor; + + public final NamedXContentRegistry namedXContentRegistry; + + /** + * Provides scheduler support + */ + public final BiFunction scheduler; + + /** + * Provides access to the node's cluster client + */ + public final Client client; + + public Parameters( + Environment env, + ScriptService scriptService, + AnalysisRegistry analysisRegistry, + ThreadContext threadContext, + LongSupplier relativeTimeSupplier, + BiFunction scheduler, + SearchPipelineService searchPipelineService, + Client client, + Consumer genericExecutor, + NamedXContentRegistry namedXContentRegistry + ) { + this.env = env; + this.scriptService = scriptService; + this.threadContext = threadContext; + this.analysisRegistry = analysisRegistry; + this.relativeTimeSupplier = relativeTimeSupplier; + this.scheduler = scheduler; + this.searchPipelineService = searchPipelineService; + this.client = client; + this.genericExecutor = genericExecutor; + this.namedXContentRegistry = namedXContentRegistry; + } + + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/ProcessorInfo.java b/server/src/main/java/org/opensearch/search/pipeline/ProcessorInfo.java new file mode 100644 index 0000000000000..0864fecb6c7f1 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/ProcessorInfo.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * Information about a search pipeline processor + * + * TODO: This is copy/pasted from the ingest ProcessorInfo. + * Can/should we share implementation or is this just boilerplate? + * + * @opensearch.internal + */ +public class ProcessorInfo implements Writeable, ToXContentObject, Comparable { + + private final String type; + + public ProcessorInfo(String type) { + this.type = type; + } + + /** + * Read from a stream. 
+ */ + public ProcessorInfo(StreamInput input) throws IOException { + type = input.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(this.type); + } + + /** + * @return The unique processor type + */ + public String getType() { + return type; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.endObject(); + return null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ProcessorInfo that = (ProcessorInfo) o; + + return type.equals(that.type); + + } + + @Override + public int hashCode() { + return type.hashCode(); + } + + @Override + public int compareTo(ProcessorInfo o) { + return type.compareTo(o.type); + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineMetadata.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineMetadata.java new file mode 100644 index 0000000000000..bfbf5cd24bf92 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineMetadata.java @@ -0,0 +1,163 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.Version; +import org.opensearch.cluster.Diff; +import org.opensearch.cluster.DiffableUtils; +import org.opensearch.cluster.NamedDiff; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Represents the search pipelines that are available in the cluster + * + * @opensearch.internal + */ +public class SearchPipelineMetadata implements Metadata.Custom { + public static final String TYPE = "search_pipeline"; + + private static final ParseField PIPELINES_FIELD = new ParseField("pipeline"); + private static final ObjectParser, Void> SEARCH_PIPELINE_METADATA_PARSER = new ObjectParser<>( + "search_pipeline_metadata", + ArrayList::new + ); + static { + SEARCH_PIPELINE_METADATA_PARSER.declareObjectArray(List::addAll, PipelineConfiguration.getParser(), PIPELINES_FIELD); + } + // Mapping from pipeline ID to each pipeline's configuration. 
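+ // Both constructors wrap the map in Collections.unmodifiableMap, so callers cannot mutate cluster metadata in place.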
+ private final Map pipelines; + + public SearchPipelineMetadata(Map pipelines) { + this.pipelines = Collections.unmodifiableMap(pipelines); + } + + @Override + public String getWriteableName() { + return TYPE; + } + + @Override + public Version getMinimalSupportedVersion() { + return Version.CURRENT.minimumCompatibilityVersion(); + } + + public Map getPipelines() { + return pipelines; + } + + public SearchPipelineMetadata(StreamInput in) throws IOException { + int size = in.readVInt(); + Map pipelines = new HashMap<>(size); + for (int i = 0; i < size; i++) { + PipelineConfiguration pipeline = PipelineConfiguration.readFrom(in); + pipelines.put(pipeline.getId(), pipeline); + } + this.pipelines = Collections.unmodifiableMap(pipelines); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(pipelines.size()); + for (PipelineConfiguration pipeline : pipelines.values()) { + pipeline.writeTo(out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray(PIPELINES_FIELD.getPreferredName()); + for (PipelineConfiguration pipeline : pipelines.values()) { + pipeline.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + public static SearchPipelineMetadata fromXContent(XContentParser parser) throws IOException { + Map pipelines = new HashMap<>(); + List configs = SEARCH_PIPELINE_METADATA_PARSER.parse(parser, null); + for (PipelineConfiguration pipeline : configs) { + pipelines.put(pipeline.getId(), pipeline); + } + return new SearchPipelineMetadata(pipelines); + } + + @Override + public EnumSet context() { + return Metadata.ALL_CONTEXTS; + } + + @Override + public Diff diff(Metadata.Custom previousState) { + return new SearchPipelineMetadataDiff((SearchPipelineMetadata) previousState, this); + } + + public static NamedDiff readDiffFrom(StreamInput in) throws IOException { + return new SearchPipelineMetadataDiff(in); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchPipelineMetadata that = (SearchPipelineMetadata) o; + return pipelines.equals(that.pipelines); + } + + @Override + public int hashCode() { + return pipelines.hashCode(); + } + + static class SearchPipelineMetadataDiff implements NamedDiff { + final Diff> pipelines; + + public SearchPipelineMetadataDiff(SearchPipelineMetadata before, SearchPipelineMetadata after) { + this.pipelines = DiffableUtils.diff(before.pipelines, after.pipelines, DiffableUtils.getStringKeySerializer()); + } + + public SearchPipelineMetadataDiff(StreamInput in) throws IOException { + this.pipelines = DiffableUtils.readJdkMapDiff( + in, + DiffableUtils.getStringKeySerializer(), + PipelineConfiguration::readFrom, + PipelineConfiguration::readDiffFrom + ); + } + + @Override + public Metadata.Custom apply(Metadata.Custom part) { + return new SearchPipelineMetadata(pipelines.apply(((SearchPipelineMetadata) part).pipelines)); + } + + @Override + public String getWriteableName() { + return TYPE; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + pipelines.writeTo(out); + } + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java new file mode 100644 index 0000000000000..74d901085b7aa --- /dev/null +++ 
b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -0,0 +1,410 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.ExceptionsHelper; +import org.opensearch.OpenSearchParseException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.ActionListener; +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.PutSearchPipelineRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.AckedClusterStateUpdateTask; +import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.ClusterStateApplier; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.service.ClusterManagerTaskKeys; +import org.opensearch.cluster.service.ClusterManagerTaskThrottler; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.regex.Regex; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.Environment; +import org.opensearch.gateway.GatewayService; +import org.opensearch.index.analysis.AnalysisRegistry; +import org.opensearch.ingest.ConfigurationUtils; +import org.opensearch.node.ReportingService; +import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.script.ScriptService; +import org.opensearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; + +/** + * The main entry point for search pipelines. Handles CRUD operations and exposes the API to execute search pipelines + * against requests and responses. 
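+ *
+ * Pipeline definitions are stored in cluster state as {@link SearchPipelineMetadata}; this service listens for cluster
+ * state changes and rebuilds its local map of compiled pipelines whenever that metadata changes.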
+ */ +public class SearchPipelineService implements ClusterStateApplier, ReportingService { + public static final String SEARCH_PIPELINE_ORIGIN = "search_pipeline"; + + private static final Logger logger = LogManager.getLogger(SearchPipelineService.class); + private final ClusterService clusterService; + private final ScriptService scriptService; + private final Map processorFactories; + private volatile Map pipelines = Collections.emptyMap(); + private final ThreadPool threadPool; + private final List> searchPipelineClusterStateListeners = new CopyOnWriteArrayList<>(); + private final ClusterManagerTaskThrottler.ThrottlingKey putPipelineTaskKey; + private final ClusterManagerTaskThrottler.ThrottlingKey deletePipelineTaskKey; + private volatile ClusterState state; + + public SearchPipelineService( + ClusterService clusterService, + ThreadPool threadPool, + Environment env, + ScriptService scriptService, + AnalysisRegistry analysisRegistry, + NamedXContentRegistry namedXContentRegistry, + List searchPipelinesPlugins, + Client client + ) { + this.clusterService = clusterService; + this.scriptService = scriptService; + this.threadPool = threadPool; + this.processorFactories = processorFactories( + searchPipelinesPlugins, + new Processor.Parameters( + env, + scriptService, + analysisRegistry, + threadPool.getThreadContext(), + threadPool::relativeTimeInMillis, + (delay, command) -> threadPool.schedule(command, TimeValue.timeValueMillis(delay), ThreadPool.Names.GENERIC), + this, + client, + threadPool.generic()::execute, + namedXContentRegistry + ) + ); + putPipelineTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.PUT_SEARCH_PIPELINE_KEY, true); + deletePipelineTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.DELETE_SEARCH_PIPELINE_KEY, true); + } + + private static Map processorFactories( + List searchPipelinesPlugins, + Processor.Parameters parameters + ) { + Map processorFactories = new HashMap<>(); + for (SearchPipelinesPlugin searchPipelinesPlugin : searchPipelinesPlugins) { + Map newProcessors = searchPipelinesPlugin.getProcessors(parameters); + for (Map.Entry entry : newProcessors.entrySet()) { + if (processorFactories.put(entry.getKey(), entry.getValue()) != null) { + throw new IllegalArgumentException("Ingest processor [" + entry.getKey() + "] is already registered"); + } + } + } + return Collections.unmodifiableMap(processorFactories); + } + + @Override + public void applyClusterState(ClusterChangedEvent event) { + state = event.state(); + + if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + return; + } + searchPipelineClusterStateListeners.forEach(consumer -> consumer.accept(state)); + + SearchPipelineMetadata newSearchPipelineMetadata = state.getMetadata().custom(SearchPipelineMetadata.TYPE); + if (newSearchPipelineMetadata == null) { + return; + } + + try { + innerUpdatePipelines(newSearchPipelineMetadata); + } catch (OpenSearchParseException e) { + logger.warn("failed to update search pipelines", e); + } + } + + void innerUpdatePipelines(SearchPipelineMetadata newSearchPipelineMetadata) { + Map existingPipelines = this.pipelines; + + // Lazily initialize these variables in order to favour the most likely scenario that there are no pipeline changes: + Map newPipelines = null; + List exceptions = null; + // Iterate over pipeline configurations in metadata and constructs a new pipeline if there is no pipeline + // or the pipeline configuration has been modified + + for (PipelineConfiguration 
newConfiguration : newSearchPipelineMetadata.getPipelines().values()) { + PipelineHolder previous = existingPipelines.get(newConfiguration.getId()); + if (previous != null && previous.configuration.equals(newConfiguration)) { + continue; + } + if (newPipelines == null) { + newPipelines = new HashMap<>(existingPipelines); + } + try { + Pipeline newPipeline = Pipeline.create(newConfiguration.getId(), newConfiguration.getConfigAsMap(), processorFactories); + newPipelines.put(newConfiguration.getId(), new PipelineHolder(newConfiguration, newPipeline)); + + if (previous == null) { + continue; + } + // TODO -- once we add in pipeline metrics (like in ingest pipelines), we will need to deep-copy + // the old pipeline's metrics into the new pipeline. + } catch (Exception e) { + OpenSearchParseException parseException = new OpenSearchParseException( + "Error updating pipeline with id [" + newConfiguration.getId() + "]", + e + ); + // TODO -- replace pipeline with one that throws an exception when we try to use it + if (exceptions == null) { + exceptions = new ArrayList<>(); + } + exceptions.add(parseException); + } + } + // Iterate over the current active pipelines and check whether they are missing in the pipeline configuration and + // if so delete the pipeline from new Pipelines map: + for (Map.Entry entry : existingPipelines.entrySet()) { + if (newSearchPipelineMetadata.getPipelines().get(entry.getKey()) == null) { + if (newPipelines == null) { + newPipelines = new HashMap<>(existingPipelines); + } + newPipelines.remove(entry.getKey()); + } + } + + if (newPipelines != null) { + this.pipelines = Collections.unmodifiableMap(newPipelines); + if (exceptions != null) { + ExceptionsHelper.rethrowAndSuppress(exceptions); + } + } + } + + public void putPipeline( + Map searchPipelinesInfos, + PutSearchPipelineRequest request, + ActionListener listener + ) throws Exception { + + validatePipeline(searchPipelinesInfos, request); + clusterService.submitStateUpdateTask( + "put-search-pipeline-" + request.getId(), + new AckedClusterStateUpdateTask<>(request, listener) { + @Override + public ClusterState execute(ClusterState currentState) { + return innerPut(request, currentState); + } + + @Override + public ClusterManagerTaskThrottler.ThrottlingKey getClusterManagerThrottlingKey() { + return putPipelineTaskKey; + } + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + } + ); + } + + static ClusterState innerPut(PutSearchPipelineRequest request, ClusterState currentState) { + SearchPipelineMetadata currentSearchPipelineMetadata = currentState.metadata().custom(SearchPipelineMetadata.TYPE); + Map pipelines; + if (currentSearchPipelineMetadata != null) { + pipelines = new HashMap<>(currentSearchPipelineMetadata.getPipelines()); + } else { + pipelines = new HashMap<>(); + } + pipelines.put(request.getId(), new PipelineConfiguration(request.getId(), request.getSource(), request.getXContentType())); + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metadata( + Metadata.builder(currentState.getMetadata()) + .putCustom(SearchPipelineMetadata.TYPE, new SearchPipelineMetadata(pipelines)) + .build() + ); + return newState.build(); + } + + void validatePipeline(Map searchPipelinesInfos, PutSearchPipelineRequest request) throws Exception { + if (searchPipelinesInfos.isEmpty()) { + throw new IllegalStateException("Search pipeline info is empty"); + } + Map pipelineConfig = 
XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); + Pipeline pipeline = Pipeline.create(request.getId(), pipelineConfig, processorFactories); + List exceptions = new ArrayList<>(); + for (Processor processor : pipeline.flattenAllProcessors()) { + for (Map.Entry entry : searchPipelinesInfos.entrySet()) { + String type = processor.getType(); + if (entry.getValue().containsProcessor(type) == false) { + String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]"; + exceptions.add(ConfigurationUtils.newConfigurationException(processor.getType(), processor.getTag(), null, message)); + } + } + } + ExceptionsHelper.rethrowAndSuppress(exceptions); + } + + public void deletePipeline(DeleteSearchPipelineRequest request, ActionListener listener) throws Exception { + clusterService.submitStateUpdateTask( + "delete-search-pipeline-" + request.getId(), + new AckedClusterStateUpdateTask<>(request, listener) { + @Override + public ClusterState execute(ClusterState currentState) { + return innerDelete(request, currentState); + } + + @Override + public ClusterManagerTaskThrottler.ThrottlingKey getClusterManagerThrottlingKey() { + return deletePipelineTaskKey; + } + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + } + ); + + } + + static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterState currentState) { + SearchPipelineMetadata currentMetadata = currentState.metadata().custom(SearchPipelineMetadata.TYPE); + if (currentMetadata == null) { + return currentState; + } + Map pipelines = currentMetadata.getPipelines(); + Set toRemove = new HashSet<>(); + for (String pipelineKey : pipelines.keySet()) { + if (Regex.simpleMatch(request.getId(), pipelineKey)) { + toRemove.add(pipelineKey); + } + } + if (toRemove.isEmpty()) { + if (Regex.isMatchAllPattern(request.getId())) { + // Deleting all the empty state is a no-op. + return currentState; + } + throw new ResourceNotFoundException("pipeline [{}] is missing", request.getId()); + } + final Map newPipelines = new HashMap<>(pipelines); + for (String key : toRemove) { + newPipelines.remove(key); + } + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metadata( + Metadata.builder(currentState.getMetadata()).putCustom(SearchPipelineMetadata.TYPE, new SearchPipelineMetadata(newPipelines)) + ); + return newState.build(); + } + + public SearchRequest transformRequest(SearchRequest searchRequest) throws Exception { + String pipelineId = searchRequest.pipeline(); + if (pipelineId != null) { + PipelineHolder pipeline = pipelines.get(pipelineId); + if (pipeline == null) { + throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); + } + // Save the original request by deep cloning the existing request. 
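+ // The serialize/deserialize round trip below produces an independent copy, so the processors mutate the clone
+ // and the caller's original SearchRequest is left untouched.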
+ BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + searchRequest.writeTo(bytesStreamOutput); + SearchRequest clonedRequest = new SearchRequest(bytesStreamOutput.bytes().streamInput()); + return pipeline.pipeline.transformRequest(clonedRequest); + } + return searchRequest; + } + + public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) throws Exception { + String pipelineId = request.pipeline(); + if (pipelineId != null) { + PipelineHolder pipeline = pipelines.get(pipelineId); + if (pipeline == null) { + throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); + } + return pipeline.pipeline.transformResponse(request, searchResponse); + } + return searchResponse; + } + + Map getProcessorFactories() { + return processorFactories; + } + + @Override + public SearchPipelinesInfo info() { + List processorInfoList = new ArrayList<>(); + for (Map.Entry entry : processorFactories.entrySet()) { + processorInfoList.add(new ProcessorInfo(entry.getKey())); + } + return new SearchPipelinesInfo(processorInfoList); + } + + public static List getPipelines(ClusterState clusterState, String... ids) { + SearchPipelineMetadata metadata = clusterState.getMetadata().custom(SearchPipelineMetadata.TYPE); + return innerGetPipelines(metadata, ids); + } + + static List innerGetPipelines(SearchPipelineMetadata metadata, String... ids) { + if (metadata == null) { + return Collections.emptyList(); + } + + // if we didn't ask for _any_ ID, then we get them all (this is the same as if they ask for '*') + if (ids.length == 0) { + return new ArrayList<>(metadata.getPipelines().values()); + } + List result = new ArrayList<>(ids.length); + for (String id : ids) { + if (Regex.isSimpleMatchPattern(id)) { + for (Map.Entry entry : metadata.getPipelines().entrySet()) { + if (Regex.simpleMatch(id, entry.getKey())) { + result.add(entry.getValue()); + } + } + } else { + PipelineConfiguration pipeline = metadata.getPipelines().get(id); + if (pipeline != null) { + result.add(pipeline); + } + } + } + return result; + } + + public ClusterService getClusterService() { + return clusterService; + } + + Map getPipelines() { + return pipelines; + } + + static class PipelineHolder { + + final PipelineConfiguration configuration; + final Pipeline pipeline; + + PipelineHolder(PipelineConfiguration configuration, Pipeline pipeline) { + this.configuration = Objects.requireNonNull(configuration); + this.pipeline = Objects.requireNonNull(pipeline); + } + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java new file mode 100644 index 0000000000000..0f4a830b6e5ef --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.pipeline; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.node.ReportingService; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.TreeSet; + +/** + * Information about a search pipelines event + * + * @opensearch.internal + */ +public class SearchPipelinesInfo implements ReportingService.Info { + + private final Set processors; + + public SearchPipelinesInfo(List processors) { + this.processors = new TreeSet<>(processors); // we use a treeset here to have a test-able / predictable order + } + + /** + * Read from a stream. + */ + public SearchPipelinesInfo(StreamInput in) throws IOException { + processors = new TreeSet<>(); + final int size = in.readVInt(); + for (int i = 0; i < size; i++) { + processors.add(new ProcessorInfo(in)); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("search_pipelines"); + builder.startArray("processors"); + for (ProcessorInfo info : processors) { + info.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.write(processors.size()); + for (ProcessorInfo info : processors) { + info.writeTo(out); + } + } + + public boolean containsProcessor(String type) { + return processors.contains(new ProcessorInfo(type)); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchPipelinesInfo that = (SearchPipelinesInfo) o; + return Objects.equals(processors, that.processors); + } + + @Override + public int hashCode() { + return Objects.hash(processors); + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java b/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java new file mode 100644 index 0000000000000..db20ddf596ffb --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java @@ -0,0 +1,19 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.action.search.SearchRequest; + +/** + * Interface for a search pipeline processor that modifies a search request. + */ +public interface SearchRequestProcessor extends Processor { + SearchRequest processRequest(SearchRequest request) throws Exception; + +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java b/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java new file mode 100644 index 0000000000000..b3bc5e98453b9 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java @@ -0,0 +1,20 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.pipeline; + +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; + +/** + * Interface for a search pipeline processor that modifies a search response. + */ +public interface SearchResponseProcessor extends Processor { + SearchResponse processResponse(SearchRequest request, SearchResponse response) throws Exception; + +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/package-info.java b/server/src/main/java/org/opensearch/search/pipeline/package-info.java new file mode 100644 index 0000000000000..91573d06e8f76 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Search pipelines base package. */ +package org.opensearch.search.pipeline; diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java index 6710276dad0e9..cfd6fcec4bdc6 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java @@ -71,6 +71,7 @@ public void testGetInfo() { null, null, null, + null, null ); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java index 9c0090a15d7a5..d3a40868bc389 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -167,6 +167,7 @@ private static NodeInfo createNodeInfo(String nodeId, String transportType, Stri null, null, null, + null, null ); } diff --git a/server/src/test/java/org/opensearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/opensearch/action/ingest/GetPipelineResponseTests.java index 962428a2a0930..68bb523d594c0 100644 --- a/server/src/test/java/org/opensearch/action/ingest/GetPipelineResponseTests.java +++ b/server/src/test/java/org/opensearch/action/ingest/GetPipelineResponseTests.java @@ -99,6 +99,16 @@ public void testXContentDeserialization() throws IOException { } } + public void testSubsetNotEqual() throws IOException { + PipelineConfiguration pipeline1 = createRandomPipeline("pipe1"); + PipelineConfiguration pipeline2 = createRandomPipeline("pipe2"); + + GetPipelineResponse response1 = new GetPipelineResponse(List.of(pipeline1)); + GetPipelineResponse response2 = new GetPipelineResponse(List.of(pipeline1, pipeline2)); + assertNotEquals(response1, response2); + assertNotEquals(response2, response1); + } + @Override protected GetPipelineResponse doParseInstance(XContentParser parser) throws IOException { return GetPipelineResponse.fromXContent(parser); @@ -133,4 +143,5 @@ protected GetPipelineResponse mutateInstance(GetPipelineResponse response) { throw new UncheckedIOException(e); } } + } diff --git a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java index 6f226385896d8..341f52f28e5e0 100644 --- 
a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java @@ -54,6 +54,7 @@ import org.opensearch.plugins.PluginInfo; import org.opensearch.search.aggregations.support.AggregationInfo; import org.opensearch.search.aggregations.support.AggregationUsageService; +import org.opensearch.search.pipeline.SearchPipelinesInfo; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.ThreadPool; @@ -242,6 +243,17 @@ private static NodeInfo createNodeInfo() { // pick a random long that sometimes exceeds an int: indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L << 40) - 1)); } + + SearchPipelinesInfo searchPipelinesInfo = null; + if (randomBoolean()) { + int numProcessors = randomIntBetween(0, 5); + List processors = new ArrayList<>(numProcessors); + for (int i = 0; i < numProcessors; i++) { + processors.add(new org.opensearch.search.pipeline.ProcessorInfo(randomAlphaOfLengthBetween(3, 10))); + } + searchPipelinesInfo = new SearchPipelinesInfo(processors); + } + return new NodeInfo( VersionUtils.randomVersion(random()), build, @@ -256,7 +268,8 @@ private static NodeInfo createNodeInfo() { pluginsAndModules, ingestInfo, aggregationInfo, - indexingBuffer + indexingBuffer, + searchPipelinesInfo ); } } diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java new file mode 100644 index 0000000000000..c1074f3f41fc1 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -0,0 +1,620 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.pipeline; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.opensearch.OpenSearchParseException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.Version; +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.PutSearchPipelineRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.search.SearchResponseSections; +import org.opensearch.client.Client; +import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterName; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; +import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.test.MockLogAppender; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; + +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.function.Consumer; + +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SearchPipelineServiceTests extends OpenSearchTestCase { + private static final SearchPipelinesPlugin DUMMY_PLUGIN = new SearchPipelinesPlugin() { + @Override + public Map getProcessors(Processor.Parameters parameters) { + return Map.of("foo", (factories, tag, description, config) -> null); + } + }; + + private ThreadPool threadPool; + + @Before + public void setup() { + threadPool = mock(ThreadPool.class); + ExecutorService executorService = OpenSearchExecutors.newDirectExecutorService(); + when(threadPool.generic()).thenReturn(executorService); + when(threadPool.executor(anyString())).thenReturn(executorService); + } + + public void testSearchPipelinePlugin() { + Client client = mock(Client.class); + SearchPipelineService searchPipelineService = new SearchPipelineService( + mock(ClusterService.class), + threadPool, + null, + null, + null, + this.xContentRegistry(), + List.of(DUMMY_PLUGIN), + client + ); + Map factories = searchPipelineService.getProcessorFactories(); + assertEquals(1, factories.size()); + assertTrue(factories.containsKey("foo")); + } + + public void testSearchPipelinePluginDuplicate() { + Client client = mock(Client.class); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new SearchPipelineService( + mock(ClusterService.class), + threadPool, + null, + null, + null, + this.xContentRegistry(), + List.of(DUMMY_PLUGIN, DUMMY_PLUGIN), + client + ) + ); + assertTrue(e.getMessage(), e.getMessage().contains(" already registered")); + } + + public void testExecuteSearchPipelineDoesNotExist() { + Client client = mock(Client.class); + SearchPipelineService searchPipelineService = new SearchPipelineService( + mock(ClusterService.class), + 
threadPool, + null, + null, + null, + this.xContentRegistry(), + List.of(DUMMY_PLUGIN), + client + ); + final SearchRequest searchRequest = new SearchRequest("_index").pipeline("bar"); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> searchPipelineService.transformRequest(searchRequest) + ); + assertTrue(e.getMessage(), e.getMessage().contains(" not defined")); + } + + private static abstract class FakeProcessor implements Processor { + private final String type; + private final String tag; + private final String description; + + protected FakeProcessor(String type, String tag, String description) { + this.type = type; + this.tag = tag; + this.description = description; + } + + @Override + public String getType() { + return type; + } + + @Override + public String getTag() { + return tag; + } + + @Override + public String getDescription() { + return description; + } + } + + private static class FakeRequestProcessor extends FakeProcessor implements SearchRequestProcessor { + private final Consumer executor; + + public FakeRequestProcessor(String type, String tag, String description, Consumer executor) { + super(type, tag, description); + this.executor = executor; + } + + @Override + public SearchRequest processRequest(SearchRequest request) throws Exception { + executor.accept(request); + return request; + } + } + + private static class FakeResponseProcessor extends FakeProcessor implements SearchResponseProcessor { + private final Consumer executor; + + public FakeResponseProcessor(String type, String tag, String description, Consumer executor) { + super(type, tag, description); + this.executor = executor; + } + + @Override + public SearchResponse processResponse(SearchRequest request, SearchResponse response) throws Exception { + executor.accept(response); + return response; + } + } + + private static SearchPipelineService createWithProcessors() { + Map processors = new HashMap<>(); + processors.put("scale_request_size", (processorFactories, tag, description, config) -> { + float scale = ((Number) config.remove("scale")).floatValue(); + return new FakeRequestProcessor( + "scale_request_size", + tag, + description, + req -> req.source().size((int) (req.source().size() * scale)) + ); + }); + processors.put("fixed_score", (processorFactories, tag, description, config) -> { + float score = ((Number) config.remove("score")).floatValue(); + return new FakeResponseProcessor("fixed_score", tag, description, rsp -> rsp.getHits().forEach(h -> h.score(score))); + }); + return createWithProcessors(processors); + } + + private static SearchPipelineService createWithProcessors(Map processors) { + Client client = mock(Client.class); + ThreadPool threadPool = mock(ThreadPool.class); + ExecutorService executorService = OpenSearchExecutors.newDirectExecutorService(); + when(threadPool.generic()).thenReturn(executorService); + when(threadPool.executor(anyString())).thenReturn(executorService); + return new SearchPipelineService( + mock(ClusterService.class), + threadPool, + null, + null, + null, + null, + Collections.singletonList(new SearchPipelinesPlugin() { + @Override + public Map getProcessors(Processor.Parameters parameters) { + return processors; + } + }), + client + ); + } + + public void testUpdatePipelines() { + SearchPipelineService searchPipelineService = createWithProcessors(); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousClusterState = clusterState; + searchPipelineService.applyClusterState(new 
ClusterChangedEvent("", clusterState, previousClusterState)); + assertEquals(0, searchPipelineService.getPipelines().size()); + + PipelineConfiguration pipeline = new PipelineConfiguration( + "_id", + new BytesArray( + "{ " + + "\"request_processors\" : [ { \"scale_request_size\": { \"scale\" : 2 } } ], " + + "\"response_processors\" : [ { \"fixed_score\" : { \"score\" : 1.0 } } ]" + + "}" + ), + XContentType.JSON + ); + SearchPipelineMetadata pipelineMetadata = new SearchPipelineMetadata(Map.of("_id", pipeline)); + clusterState = ClusterState.builder(clusterState) + .metadata(Metadata.builder().putCustom(SearchPipelineMetadata.TYPE, pipelineMetadata).build()) + .build(); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + assertEquals(1, searchPipelineService.getPipelines().size()); + assertEquals("_id", searchPipelineService.getPipelines().get("_id").pipeline.getId()); + assertNull(searchPipelineService.getPipelines().get("_id").pipeline.getDescription()); + assertEquals(1, searchPipelineService.getPipelines().get("_id").pipeline.getSearchRequestProcessors().size()); + assertEquals( + "scale_request_size", + searchPipelineService.getPipelines().get("_id").pipeline.getSearchRequestProcessors().get(0).getType() + ); + assertEquals(1, searchPipelineService.getPipelines().get("_id").pipeline.getSearchResponseProcessors().size()); + assertEquals( + "fixed_score", + searchPipelineService.getPipelines().get("_id").pipeline.getSearchResponseProcessors().get(0).getType() + ); + } + + public void testPutPipeline() { + SearchPipelineService searchPipelineService = createWithProcessors(); + String id = "_id"; + SearchPipelineService.PipelineHolder pipeline = searchPipelineService.getPipelines().get(id); + assertNull(pipeline); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + + PutSearchPipelineRequest putRequest = new PutSearchPipelineRequest(id, new BytesArray("{}"), XContentType.JSON); + ClusterState previousClusterState = clusterState; + clusterState = SearchPipelineService.innerPut(putRequest, clusterState); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + pipeline = searchPipelineService.getPipelines().get(id); + assertNotNull(pipeline); + assertEquals(id, pipeline.pipeline.getId()); + assertNull(pipeline.pipeline.getDescription()); + assertEquals(0, pipeline.pipeline.getSearchRequestProcessors().size()); + assertEquals(0, pipeline.pipeline.getSearchResponseProcessors().size()); + + // Overwrite pipeline + putRequest = new PutSearchPipelineRequest(id, new BytesArray("{ \"description\": \"empty pipeline\"}"), XContentType.JSON); + previousClusterState = clusterState; + clusterState = SearchPipelineService.innerPut(putRequest, clusterState); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + pipeline = searchPipelineService.getPipelines().get(id); + assertNotNull(pipeline); + assertEquals(id, pipeline.pipeline.getId()); + assertEquals("empty pipeline", pipeline.pipeline.getDescription()); + assertEquals(0, pipeline.pipeline.getSearchRequestProcessors().size()); + assertEquals(0, pipeline.pipeline.getSearchResponseProcessors().size()); + } + + public void testPutInvalidPipeline() throws IllegalAccessException { + SearchPipelineService searchPipelineService = createWithProcessors(); + String id = "_id"; + + ClusterState clusterState = ClusterState.builder(new 
ClusterName("_name")).build(); + ClusterState previousState = clusterState; + + PutSearchPipelineRequest putRequest = new PutSearchPipelineRequest( + id, + new BytesArray("{\"request_processors\" : [ { \"scale_request_size\": { \"scale\" : \"foo\" } } ] }"), + XContentType.JSON + ); + clusterState = SearchPipelineService.innerPut(putRequest, clusterState); + try (MockLogAppender mockAppender = MockLogAppender.createForLoggers(LogManager.getLogger(SearchPipelineService.class))) { + mockAppender.addExpectation( + new MockLogAppender.SeenEventExpectation( + "test1", + SearchPipelineService.class.getCanonicalName(), + Level.WARN, + "failed to update search pipelines" + ) + ); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + mockAppender.assertAllExpectationsMatched(); + } + assertEquals(0, searchPipelineService.getPipelines().size()); + } + + public void testDeletePipeline() { + SearchPipelineService searchPipelineService = createWithProcessors(); + PipelineConfiguration config = new PipelineConfiguration( + "_id", + new BytesArray("{\"request_processors\" : [ { \"scale_request_size\": { \"scale\" : 2 } } ] }"), + XContentType.JSON + ); + SearchPipelineMetadata searchPipelineMetadata = new SearchPipelineMetadata(Map.of("_id", config)); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousState = clusterState; + clusterState = ClusterState.builder(clusterState) + .metadata(Metadata.builder().putCustom(SearchPipelineMetadata.TYPE, searchPipelineMetadata).build()) + .build(); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + assertEquals(1, searchPipelineService.getPipelines().size()); + + // Delete pipeline: + DeleteSearchPipelineRequest deleteRequest = new DeleteSearchPipelineRequest("_id"); + previousState = clusterState; + clusterState = SearchPipelineService.innerDelete(deleteRequest, clusterState); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + assertEquals(0, searchPipelineService.getPipelines().size()); + + final ClusterState finalClusterState = clusterState; + // Delete missing pipeline + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> SearchPipelineService.innerDelete(deleteRequest, finalClusterState) + ); + assertEquals("pipeline [_id] is missing", e.getMessage()); + } + + public void testDeletePipelinesWithWildcard() { + SearchPipelineService searchPipelineService = createWithProcessors(); + BytesArray definition = new BytesArray("{\"request_processors\" : [ { \"scale_request_size\": { \"scale\" : 2 } } ] }"); + SearchPipelineMetadata metadata = new SearchPipelineMetadata( + Map.of( + "p1", + new PipelineConfiguration("p1", definition, XContentType.JSON), + "p2", + new PipelineConfiguration("p2", definition, XContentType.JSON), + "q1", + new PipelineConfiguration("q1", definition, XContentType.JSON) + ) + ); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousState = clusterState; + clusterState = ClusterState.builder(clusterState) + .metadata(Metadata.builder().putCustom(SearchPipelineMetadata.TYPE, metadata)) + .build(); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + assertNotNull(searchPipelineService.getPipelines().get("p1")); + assertNotNull(searchPipelineService.getPipelines().get("p2")); + 
assertNotNull(searchPipelineService.getPipelines().get("q1")); + + // Delete all pipelines starting with "p" + DeleteSearchPipelineRequest deleteRequest = new DeleteSearchPipelineRequest("p*"); + previousState = clusterState; + clusterState = SearchPipelineService.innerDelete(deleteRequest, clusterState); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + assertEquals(1, searchPipelineService.getPipelines().size()); + assertNotNull(searchPipelineService.getPipelines().get("q1")); + + // Prefix wildcard fails if no matches + final ClusterState finalClusterState = clusterState; + ResourceNotFoundException e = expectThrows( + ResourceNotFoundException.class, + () -> SearchPipelineService.innerDelete(deleteRequest, finalClusterState) + ); + assertEquals("pipeline [p*] is missing", e.getMessage()); + + // Delete all removes remaining pipeline + DeleteSearchPipelineRequest deleteAllRequest = new DeleteSearchPipelineRequest("*"); + previousState = clusterState; + clusterState = SearchPipelineService.innerDelete(deleteAllRequest, clusterState); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + assertEquals(0, searchPipelineService.getPipelines().size()); + + // Delete all does not throw exception if no pipelines + SearchPipelineService.innerDelete(deleteAllRequest, clusterState); + } + + public void testTransformRequest() throws Exception { + SearchPipelineService searchPipelineService = createWithProcessors(); + + SearchPipelineMetadata metadata = new SearchPipelineMetadata( + Map.of( + "p1", + new PipelineConfiguration( + "p1", + new BytesArray("{\"request_processors\" : [ { \"scale_request_size\": { \"scale\" : 2 } } ] }"), + XContentType.JSON + ) + ) + ); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousState = clusterState; + clusterState = ClusterState.builder(clusterState) + .metadata(Metadata.builder().putCustom(SearchPipelineMetadata.TYPE, metadata)) + .build(); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + + int size = 10; + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(size); + SearchRequest request = new SearchRequest("_index").source(sourceBuilder).pipeline("p1"); + + SearchRequest transformedRequest = searchPipelineService.transformRequest(request); + + assertEquals(2 * size, transformedRequest.source().size()); + assertEquals(size, request.source().size()); + + // This request doesn't specify a pipeline, it doesn't get transformed. 
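+ // transformRequest should return the same instance untouched, which assertSame verifies below.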
+ request = new SearchRequest("_index").source(sourceBuilder); + SearchRequest notTransformedRequest = searchPipelineService.transformRequest(request); + assertEquals(size, notTransformedRequest.source().size()); + assertSame(request, notTransformedRequest); + } + + public void testTransformResponse() throws Exception { + SearchPipelineService searchPipelineService = createWithProcessors(); + + SearchPipelineMetadata metadata = new SearchPipelineMetadata( + Map.of( + "p1", + new PipelineConfiguration( + "p1", + new BytesArray("{\"response_processors\" : [ { \"fixed_score\": { \"score\" : 2 } } ] }"), + XContentType.JSON + ) + ) + ); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); + ClusterState previousState = clusterState; + clusterState = ClusterState.builder(clusterState) + .metadata(Metadata.builder().putCustom(SearchPipelineMetadata.TYPE, metadata)) + .build(); + searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); + + int size = 10; + SearchHit[] hits = new SearchHit[size]; + for (int i = 0; i < size; i++) { + hits[i] = new SearchHit(i, "doc" + i, Collections.emptyMap(), Collections.emptyMap()); + hits[i].score(i); + } + SearchHits searchHits = new SearchHits(hits, new TotalHits(size * 2, TotalHits.Relation.EQUAL_TO), size); + SearchResponseSections searchResponseSections = new SearchResponseSections(searchHits, null, null, false, false, null, 0); + SearchResponse searchResponse = new SearchResponse(searchResponseSections, null, 1, 1, 0, 10, null, null); + + // First try without specifying a pipeline, which should be a no-op. + SearchRequest searchRequest = new SearchRequest(); + SearchResponse notTransformedResponse = searchPipelineService.transformResponse(searchRequest, searchResponse); + assertSame(searchResponse, notTransformedResponse); + + // Now apply a pipeline + searchRequest = new SearchRequest().pipeline("p1"); + SearchResponse transformedResponse = searchPipelineService.transformResponse(searchRequest, searchResponse); + assertEquals(size, transformedResponse.getHits().getHits().length); + for (int i = 0; i < size; i++) { + assertEquals(2.0, transformedResponse.getHits().getHits()[i].getScore(), 0.0001f); + } + + expectThrows( + IllegalArgumentException.class, + () -> searchPipelineService.transformResponse(new SearchRequest().pipeline("p2"), searchResponse) + ); + } + + public void testGetPipelines() { + // + assertEquals(0, SearchPipelineService.innerGetPipelines(null, "p1").size()); + + SearchPipelineMetadata metadata = new SearchPipelineMetadata( + Map.of( + "p1", + new PipelineConfiguration( + "p1", + new BytesArray("{\"request_processors\" : [ { \"scale_request_size\": { \"scale\" : 2 } } ] }"), + XContentType.JSON + ), + "p2", + new PipelineConfiguration( + "p2", + new BytesArray("{\"response_processors\" : [ { \"fixed_score\": { \"score\" : 2 } } ] }"), + XContentType.JSON + ) + ) + ); + + // Return all when no ids specified + List pipelines = SearchPipelineService.innerGetPipelines(metadata); + assertEquals(2, pipelines.size()); + pipelines.sort(Comparator.comparing(PipelineConfiguration::getId)); + assertEquals("p1", pipelines.get(0).getId()); + assertEquals("p2", pipelines.get(1).getId()); + + // Get specific pipeline + pipelines = SearchPipelineService.innerGetPipelines(metadata, "p1"); + assertEquals(1, pipelines.size()); + assertEquals("p1", pipelines.get(0).getId()); + + // Get both pipelines explicitly + pipelines = SearchPipelineService.innerGetPipelines(metadata, "p1", 
"p2"); + assertEquals(2, pipelines.size()); + pipelines.sort(Comparator.comparing(PipelineConfiguration::getId)); + assertEquals("p1", pipelines.get(0).getId()); + assertEquals("p2", pipelines.get(1).getId()); + + // Match all + pipelines = SearchPipelineService.innerGetPipelines(metadata, "*"); + assertEquals(2, pipelines.size()); + pipelines.sort(Comparator.comparing(PipelineConfiguration::getId)); + assertEquals("p1", pipelines.get(0).getId()); + assertEquals("p2", pipelines.get(1).getId()); + + // Match prefix + pipelines = SearchPipelineService.innerGetPipelines(metadata, "p*"); + assertEquals(2, pipelines.size()); + pipelines.sort(Comparator.comparing(PipelineConfiguration::getId)); + assertEquals("p1", pipelines.get(0).getId()); + assertEquals("p2", pipelines.get(1).getId()); + } + + public void testValidatePipeline() throws Exception { + SearchPipelineService searchPipelineService = createWithProcessors(); + + ProcessorInfo reqProcessor = new ProcessorInfo("scale_request_size"); + ProcessorInfo rspProcessor = new ProcessorInfo("fixed_score"); + DiscoveryNode n1 = new DiscoveryNode("n1", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode n2 = new DiscoveryNode("n2", buildNewFakeTransportAddress(), Version.CURRENT); + PutSearchPipelineRequest putRequest = new PutSearchPipelineRequest( + "p1", + new BytesArray( + "{" + + "\"request_processors\": [{ \"scale_request_size\": { \"scale\" : 2 } }]," + + "\"response_processors\": [{ \"fixed_score\": { \"score\" : 2 } }]" + + "}" + ), + XContentType.JSON + ); + + // One node is missing a processor + expectThrows( + OpenSearchParseException.class, + () -> searchPipelineService.validatePipeline( + Map.of( + n1, + new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)), + n2, + new SearchPipelinesInfo(List.of(reqProcessor)) + ), + putRequest + ) + ); + + // Discovery failed, no infos passed. 
+ expectThrows(IllegalStateException.class, () -> searchPipelineService.validatePipeline(Collections.emptyMap(), putRequest)); + + // Invalid configuration in request + PutSearchPipelineRequest badPutRequest = new PutSearchPipelineRequest( + "p1", + new BytesArray( + "{" + + "\"request_processors\": [{ \"scale_request_size\": { \"scale\" : \"banana\" } }]," + + "\"response_processors\": [{ \"fixed_score\": { \"score\" : 2 } }]" + + "}" + ), + XContentType.JSON + ); + expectThrows( + ClassCastException.class, + () -> searchPipelineService.validatePipeline( + Map.of( + n1, + new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)), + n2, + new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)) + ), + badPutRequest + ) + ); + + // Success + searchPipelineService.validatePipeline( + Map.of( + n1, + new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)), + n2, + new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)) + ), + putRequest + ); + } + + public void testInfo() { + SearchPipelineService searchPipelineService = createWithProcessors(); + SearchPipelinesInfo info = searchPipelineService.info(); + assertTrue(info.containsProcessor("scale_request_size")); + assertTrue(info.containsProcessor("fixed_score")); + } +} diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 33b7e74ad7b51..cc760b38a82f7 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -204,6 +204,7 @@ import org.opensearch.search.SearchService; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.fetch.FetchPhase; +import org.opensearch.search.pipeline.SearchPipelineService; import org.opensearch.search.query.QueryPhase; import org.opensearch.snapshots.mockstore.MockEventuallyConsistentRepository; import org.opensearch.tasks.TaskResourceTrackingService; @@ -2077,7 +2078,17 @@ public void onFailure(final Exception e) { clusterService, actionFilters, indexNameExpressionResolver, - namedWriteableRegistry + namedWriteableRegistry, + new SearchPipelineService( + clusterService, + threadPool, + environment, + scriptService, + new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), + namedXContentRegistry, + List.of(), + client + ) ) ); actions.put( From 425120b953b195aac28258d25b25b6e07826bcd5 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Fri, 10 Mar 2023 22:40:27 +0000 Subject: [PATCH 02/14] Incorporate feedback from @reta and @navneet1v 1. SearchPipelinesClient: JavaDoc fix 2. SearchRequest: Check versions when (de)serializing new "pipeline" property. 3. Rename SearchPipelinesPlugin -> SearchPipelinePlugin. 4. Pipeline: Change visibility to package private 5. SearchPipelineProcessingException: New exception type to wrap exceptions thrown when executing a pipeline. Bonus: Added an integration test for filter_query request processor. 
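For item 2, the wire change follows the usual version-gate pattern so that older nodes neither read nor write the new field. A condensed sketch, abridged from the SearchRequest diff below (surrounding fields elided):

    public SearchRequest(StreamInput in) throws IOException {
        // ... existing fields elided ...
        if (in.getVersion().onOrAfter(Version.V_3_0_0)) {
            // The field is only on the wire when the sending node is 3.0 or later.
            pipeline = in.readOptionalString();
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // ... existing fields elided ...
        if (out.getVersion().onOrAfter(Version.V_3_0_0)) {
            // Only serialize the field to nodes that know how to read it.
            out.writeOptionalString(pipeline);
        }
    }
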
Signed-off-by: Michael Froh --- .../client/SearchPipelinesClient.java | 4 +- .../client/SearchPipelinesClientIT.java | 3 +- modules/search-pipeline-common/build.gradle | 2 +- .../SearchPipelineCommonModulePlugin.java | 4 +- .../test/search_pipeline/30_filter_query.yml | 60 +++++++++++++++++++ .../resources/rest-api-spec/api/search.json | 4 ++ .../action/search/SearchRequest.java | 8 ++- .../main/java/org/opensearch/node/Node.java | 4 +- ...sPlugin.java => SearchPipelinePlugin.java} | 2 +- .../opensearch/search/pipeline/Pipeline.java | 31 +++++++--- .../opensearch/search/pipeline/Processor.java | 3 +- .../SearchPipelineProcessingException.java | 31 ++++++++++ .../pipeline/SearchPipelineService.java | 23 +++---- .../pipeline/SearchPipelineServiceTests.java | 6 +- 14 files changed, 146 insertions(+), 39 deletions(-) create mode 100644 modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml rename server/src/main/java/org/opensearch/plugins/{SearchPipelinesPlugin.java => SearchPipelinePlugin.java} (96%) create mode 100644 server/src/main/java/org/opensearch/search/pipeline/SearchPipelineProcessingException.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java index 36a390a805f4b..d6ec367194c99 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java @@ -69,7 +69,7 @@ public Cancellable putPipelineAsync( } /** - * Get an existing pipeline. + * Get existing pipelines. * * @param request the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized @@ -87,7 +87,7 @@ public GetSearchPipelineResponse getPipeline(GetSearchPipelineRequest request, R } /** - * Asynchronously get an existing pipeline. + * Asynchronously get existing pipelines. * * @param request the request * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java index 6f01cb5144eba..383d759c7107f 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java @@ -16,7 +16,6 @@ import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.search.pipeline.Pipeline; import java.io.IOException; @@ -92,7 +91,7 @@ private static XContentBuilder buildSearchPipeline(XContentBuilder builder) thro builder.startObject(); { builder.field("description", "a pipeline description"); - builder.startArray(Pipeline.REQUEST_PROCESSORS_KEY); + builder.startArray("request_processors"); { builder.startObject().startObject("filter_query"); { diff --git a/modules/search-pipeline-common/build.gradle b/modules/search-pipeline-common/build.gradle index dd37384923930..73e2871ce46b6 100644 --- a/modules/search-pipeline-common/build.gradle +++ b/modules/search-pipeline-common/build.gradle @@ -44,7 +44,7 @@ dependencies { restResources { restApi { - includeCore '_common', 'search_pipelines', 'cluster', 'indices', 'index', 'nodes', 'get', 'update', 'cat', 'mget' + includeCore '_common', 'search_pipelines', 'cluster', 'indices', 'index', 'nodes', 'get', 'update', 'cat', 'mget', 'search' } } diff --git a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java index fc8f68cf3db70..caca753caf819 100644 --- a/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java +++ b/modules/search-pipeline-common/src/main/java/org/opensearch/search/pipeline/common/SearchPipelineCommonModulePlugin.java @@ -9,7 +9,7 @@ package org.opensearch.search.pipeline.common; import org.opensearch.plugins.Plugin; -import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.plugins.SearchPipelinePlugin; import org.opensearch.search.pipeline.Processor; import java.util.Map; @@ -17,7 +17,7 @@ /** * Plugin providing common search request/response processors for use in search pipelines. */ -public class SearchPipelineCommonModulePlugin extends Plugin implements SearchPipelinesPlugin { +public class SearchPipelineCommonModulePlugin extends Plugin implements SearchPipelinePlugin { /** * No constructor needed, but build complains if we don't have a constructor with JavaDoc. 
diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml new file mode 100644 index 0000000000000..3e84c36338996 --- /dev/null +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml @@ -0,0 +1,60 @@ +--- +teardown: + - do: + search_pipelines.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test filter_query processor": + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "request_processors": [ + { + "filter_query" : { + "query" : { + "term" : { + "field" : "value" + } + } + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + body: { + "field" : "foo" + } + - do: + index: + index: test + id: 2 + body: { + "field": "value" + } + + - do: + indices.refresh: + index: test + + - do: + search: + body: { } + - match: { hits.total.value: 2 } + + - do: + search: + index: test + search_pipeline: "my_pipeline" + body: { } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "2" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index ac321acf8907b..e0fbeeb83ffc4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -225,6 +225,10 @@ "type":"boolean", "description":"Indicates whether hits.total should be rendered as an integer or an object in the rest search response", "default":false + }, + "search_pipeline": { + "type": "string", + "description": "The search pipeline to use to execute this request" } }, "body":{ diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequest.java b/server/src/main/java/org/opensearch/action/search/SearchRequest.java index 40a59d81d6d69..73e8b56c0db9d 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequest.java @@ -250,7 +250,9 @@ public SearchRequest(StreamInput in) throws IOException { } ccsMinimizeRoundtrips = in.readBoolean(); cancelAfterTimeInterval = in.readOptionalTimeValue(); - pipeline = in.readOptionalString(); + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { // TODO: Update if/when we backport to 2.x + pipeline = in.readOptionalString(); + } } @Override @@ -279,7 +281,9 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeBoolean(ccsMinimizeRoundtrips); out.writeOptionalTimeValue(cancelAfterTimeInterval); - out.writeOptionalString(pipeline); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { // TODO: Update if/when we backport to 2.x + out.writeOptionalString(pipeline); + } } @Override diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 9e2df65d984c5..60f0bb1ad606c 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -46,7 +46,7 @@ import org.opensearch.indices.replication.SegmentReplicationSourceService; import org.opensearch.extensions.ExtensionsManager; import org.opensearch.extensions.NoopExtensionsManager; -import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.plugins.SearchPipelinePlugin; import 
org.opensearch.search.backpressure.SearchBackpressureService; import org.opensearch.search.backpressure.settings.SearchBackpressureSettings; import org.opensearch.search.pipeline.SearchPipelineService; @@ -960,7 +960,7 @@ protected Node( scriptService, analysisModule.getAnalysisRegistry(), xContentRegistry, - pluginsService.filterPlugins(SearchPipelinesPlugin.class), + pluginsService.filterPlugins(SearchPipelinePlugin.class), client ); this.nodeService = new NodeService( diff --git a/server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java b/server/src/main/java/org/opensearch/plugins/SearchPipelinePlugin.java similarity index 96% rename from server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java rename to server/src/main/java/org/opensearch/plugins/SearchPipelinePlugin.java index 1a43fd57cd72f..8e6fbef6c8b1d 100644 --- a/server/src/main/java/org/opensearch/plugins/SearchPipelinesPlugin.java +++ b/server/src/main/java/org/opensearch/plugins/SearchPipelinePlugin.java @@ -18,7 +18,7 @@ * * @opensearch.api */ -public interface SearchPipelinesPlugin { +public interface SearchPipelinePlugin { /** * Returns additional search pipeline processor types added by this plugin. * diff --git a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java index 1b052e4a7a673..a30b90b6e1ece 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java +++ b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java @@ -12,6 +12,7 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.Nullable; +import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.ingest.ConfigurationUtils; import java.util.ArrayList; @@ -26,7 +27,7 @@ /** * Concrete representation of a search pipeline, holding multiple processors. */ -public class Pipeline { +class Pipeline { public static final String REQUEST_PROCESSORS_KEY = "request_processors"; public static final String RESPONSE_PROCESSORS_KEY = "response_processors"; @@ -142,17 +143,29 @@ List getSearchResponseProcessors() { return searchResponseProcessors; } - SearchRequest transformRequest(SearchRequest request) throws Exception { - for (SearchRequestProcessor searchRequestProcessor : searchRequestProcessors) { - request = searchRequestProcessor.processRequest(request); + SearchRequest transformRequest(SearchRequest originalRequest) throws SearchPipelineProcessingException { + try { + // Save the original request by deep cloning the existing request. 
+ BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + originalRequest.writeTo(bytesStreamOutput); + SearchRequest request = new SearchRequest(bytesStreamOutput.bytes().streamInput()); + for (SearchRequestProcessor searchRequestProcessor : searchRequestProcessors) { + request = searchRequestProcessor.processRequest(request); + } + return request; + } catch (Exception e) { + throw new SearchPipelineProcessingException(e); } - return request; } - SearchResponse transformResponse(SearchRequest request, SearchResponse response) throws Exception { - for (SearchResponseProcessor responseProcessor : searchResponseProcessors) { - response = responseProcessor.processResponse(request, response); + SearchResponse transformResponse(SearchRequest request, SearchResponse response) throws SearchPipelineProcessingException { + try { + for (SearchResponseProcessor responseProcessor : searchResponseProcessors) { + response = responseProcessor.processResponse(request, response); + } + return response; + } catch (Exception e) { + throw new SearchPipelineProcessingException(e); } - return response; } } diff --git a/server/src/main/java/org/opensearch/search/pipeline/Processor.java b/server/src/main/java/org/opensearch/search/pipeline/Processor.java index 245db15a771b9..490c62b12b6a3 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/Processor.java +++ b/server/src/main/java/org/opensearch/search/pipeline/Processor.java @@ -37,6 +37,7 @@ import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.index.analysis.AnalysisRegistry; +import org.opensearch.plugins.SearchPipelinePlugin; import org.opensearch.script.ScriptService; import org.opensearch.threadpool.Scheduler; @@ -95,7 +96,7 @@ Processor create(Map processorFactories, String tag, String des /** * Infrastructure class that holds services that can be used by processor factories to create processor instances - * and that gets passed around to all {@link org.opensearch.plugins.SearchPipelinesPlugin}s. + * and that gets passed around to all {@link SearchPipelinePlugin}s. */ class Parameters { diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineProcessingException.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineProcessingException.java new file mode 100644 index 0000000000000..a8fd0a8e42750 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineProcessingException.java @@ -0,0 +1,31 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.pipeline; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchWrapperException; +import org.opensearch.common.io.stream.StreamInput; + +import java.io.IOException; + +/** + * A dedicated wrapper for exceptions encountered executing a search pipeline processor. The wrapper is needed as we + * currently only unwrap causes for instances of {@link OpenSearchWrapperException}. 
+ * + * @opensearch.internal + */ +public class SearchPipelineProcessingException extends OpenSearchException implements OpenSearchWrapperException { + SearchPipelineProcessingException(Exception cause) { + super(cause); + } + + public SearchPipelineProcessingException(StreamInput in) throws IOException { + super(in); + } +} diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index 74d901085b7aa..03d48106bef29 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -29,7 +29,6 @@ import org.opensearch.cluster.service.ClusterManagerTaskKeys; import org.opensearch.cluster.service.ClusterManagerTaskThrottler; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentHelper; @@ -39,7 +38,7 @@ import org.opensearch.index.analysis.AnalysisRegistry; import org.opensearch.ingest.ConfigurationUtils; import org.opensearch.node.ReportingService; -import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.plugins.SearchPipelinePlugin; import org.opensearch.script.ScriptService; import org.opensearch.threadpool.ThreadPool; @@ -79,14 +78,14 @@ public SearchPipelineService( ScriptService scriptService, AnalysisRegistry analysisRegistry, NamedXContentRegistry namedXContentRegistry, - List searchPipelinesPlugins, + List searchPipelinePlugins, Client client ) { this.clusterService = clusterService; this.scriptService = scriptService; this.threadPool = threadPool; this.processorFactories = processorFactories( - searchPipelinesPlugins, + searchPipelinePlugins, new Processor.Parameters( env, scriptService, @@ -105,12 +104,12 @@ public SearchPipelineService( } private static Map processorFactories( - List searchPipelinesPlugins, + List searchPipelinePlugins, Processor.Parameters parameters ) { Map processorFactories = new HashMap<>(); - for (SearchPipelinesPlugin searchPipelinesPlugin : searchPipelinesPlugins) { - Map newProcessors = searchPipelinesPlugin.getProcessors(parameters); + for (SearchPipelinePlugin searchPipelinePlugin : searchPipelinePlugins) { + Map newProcessors = searchPipelinePlugin.getProcessors(parameters); for (Map.Entry entry : newProcessors.entrySet()) { if (processorFactories.put(entry.getKey(), entry.getValue()) != null) { throw new IllegalArgumentException("Ingest processor [" + entry.getKey() + "] is already registered"); @@ -316,23 +315,19 @@ static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterStat return newState.build(); } - public SearchRequest transformRequest(SearchRequest searchRequest) throws Exception { + public SearchRequest transformRequest(SearchRequest searchRequest) { String pipelineId = searchRequest.pipeline(); if (pipelineId != null) { PipelineHolder pipeline = pipelines.get(pipelineId); if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); } - // Save the original request by deep cloning the existing request. 
- BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); - searchRequest.writeTo(bytesStreamOutput); - SearchRequest clonedRequest = new SearchRequest(bytesStreamOutput.bytes().streamInput()); - return pipeline.pipeline.transformRequest(clonedRequest); + return pipeline.pipeline.transformRequest(searchRequest); } return searchRequest; } - public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) throws Exception { + public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) { String pipelineId = request.pipeline(); if (pipelineId != null) { PipelineHolder pipeline = pipelines.get(pipelineId); diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java index c1074f3f41fc1..379d3237ee382 100644 --- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -30,7 +30,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.plugins.SearchPipelinesPlugin; +import org.opensearch.plugins.SearchPipelinePlugin; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.builder.SearchSourceBuilder; @@ -51,7 +51,7 @@ import static org.mockito.Mockito.when; public class SearchPipelineServiceTests extends OpenSearchTestCase { - private static final SearchPipelinesPlugin DUMMY_PLUGIN = new SearchPipelinesPlugin() { + private static final SearchPipelinePlugin DUMMY_PLUGIN = new SearchPipelinePlugin() { @Override public Map getProcessors(Processor.Parameters parameters) { return Map.of("foo", (factories, tag, description, config) -> null); @@ -211,7 +211,7 @@ private static SearchPipelineService createWithProcessors(Map getProcessors(Processor.Parameters parameters) { return processors; From bb1da6b70dc57378ebb2e9553e7084059a8820bd Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Sat, 11 Mar 2023 00:37:53 +0000 Subject: [PATCH 03/14] Register SearchPipelineProcessingException Also added more useful messages to unit tests to explicitly explain what hoops need to be jumped through in order to add a new serializable exception. 
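To summarize those hoops as exercised by the diff below: the new exception needs a StreamInput constructor, an entry in OpenSearchException.OpenSearchExceptionHandle with a unique numeric id and the minimum version that understands it, and a matching entry in ExceptionSerializationTests.testIds. Condensed from the changes that follow:

    // OpenSearchException.OpenSearchExceptionHandle: unique id plus minimum wire version
    SEARCH_PIPELINE_PROCESSING_EXCEPTION(
        SearchPipelineProcessingException.class,
        SearchPipelineProcessingException::new,
        170,
        V_3_0_0
    ),

    // ExceptionSerializationTests.testIds: map the same id back to the class
    ids.put(170, SearchPipelineProcessingException.class);
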
Signed-off-by: Michael Froh --- .../java/org/opensearch/OpenSearchException.java | 11 ++++++++++- .../org/opensearch/ExceptionSerializationTests.java | 12 +++++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/OpenSearchException.java b/server/src/main/java/org/opensearch/OpenSearchException.java index 619d7386446e0..68c6d01bcc028 100644 --- a/server/src/main/java/org/opensearch/OpenSearchException.java +++ b/server/src/main/java/org/opensearch/OpenSearchException.java @@ -54,6 +54,7 @@ import org.opensearch.index.shard.ShardId; import org.opensearch.rest.RestStatus; import org.opensearch.search.aggregations.MultiBucketConsumerService; +import org.opensearch.search.pipeline.SearchPipelineProcessingException; import org.opensearch.snapshots.SnapshotInUseDeletionException; import org.opensearch.transport.TcpTransport; @@ -1642,12 +1643,20 @@ private enum OpenSearchExceptionHandle { V_2_6_0 ), NODE_WEIGHED_AWAY_EXCEPTION(NodeWeighedAwayException.class, NodeWeighedAwayException::new, 169, V_2_6_0), + SEARCH_PIPELINE_PROCESSING_EXCEPTION( + SearchPipelineProcessingException.class, + SearchPipelineProcessingException::new, + 170, + V_3_0_0 // TODO: Update if/when we backport to 2.x + ), INDEX_CREATE_BLOCK_EXCEPTION( org.opensearch.cluster.block.IndexCreateBlockException.class, org.opensearch.cluster.block.IndexCreateBlockException::new, CUSTOM_ELASTICSEARCH_EXCEPTIONS_BASE_ID + 1, V_3_0_0 - ); + ), + + ; final Class exceptionClass; final CheckedFunction constructor; diff --git a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java index ccbe31f145976..f8962f84362f1 100644 --- a/server/src/test/java/org/opensearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/opensearch/ExceptionSerializationTests.java @@ -107,6 +107,7 @@ import org.opensearch.search.SearchShardTarget; import org.opensearch.search.aggregations.MultiBucketConsumerService; import org.opensearch.search.internal.ShardSearchContextId; +import org.opensearch.search.pipeline.SearchPipelineProcessingException; import org.opensearch.snapshots.Snapshot; import org.opensearch.snapshots.SnapshotException; import org.opensearch.snapshots.SnapshotId; @@ -250,7 +251,11 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOEx // Remove the deprecated exception classes from the unregistered list. 
assertTrue(notRegistered.remove(NotMasterException.class)); assertTrue(notRegistered.remove(MasterNotDiscoveredException.class)); - assertTrue("Classes subclassing OpenSearchException must be registered \n" + notRegistered.toString(), notRegistered.isEmpty()); + assertTrue( + "Classes subclassing OpenSearchException must be registered in OpenSearchException.OpenSearchExceptionHandle \n" + + notRegistered, + notRegistered.isEmpty() + ); assertTrue(registered.removeAll(OpenSearchException.getRegisteredKeys())); // check assertEquals(registered.toString(), 0, registered.size()); } @@ -878,6 +883,7 @@ public void testIds() { ids.put(167, UnsupportedWeightedRoutingStateException.class); ids.put(168, PreferenceBasedSearchNotAllowedException.class); ids.put(169, NodeWeighedAwayException.class); + ids.put(170, SearchPipelineProcessingException.class); ids.put(10001, IndexCreateBlockException.class); Map, Integer> reverse = new HashMap<>(); @@ -889,6 +895,10 @@ public void testIds() { for (final Tuple> tuple : OpenSearchException.classes()) { assertNotNull(tuple.v1()); + assertNotNull( + tuple.v2().getName() + " not found in ExceptionSerializationTests.testIds. Please add it.", + reverse.get(tuple.v2()) + ); assertEquals((int) reverse.get(tuple.v2()), (int) tuple.v1()); } From 6f020ed08af87106362a4fca7daa21ce733c7e68 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Thu, 16 Mar 2023 23:35:44 +0000 Subject: [PATCH 04/14] Remove unneeded dependencies from search-pipeline-common I had copied some dependencies from ingest-common, but they are not used by search-pipeline-common (yet). Signed-off-by: Michael Froh --- modules/search-pipeline-common/build.gradle | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/modules/search-pipeline-common/build.gradle b/modules/search-pipeline-common/build.gradle index 73e2871ce46b6..b7f3e17a17e67 100644 --- a/modules/search-pipeline-common/build.gradle +++ b/modules/search-pipeline-common/build.gradle @@ -33,25 +33,18 @@ apply plugin: 'opensearch.internal-cluster-test' opensearchplugin { description 'Module for search pipeline processors that do not require additional security permissions or have large dependencies and resources' classname 'org.opensearch.search.pipeline.common.SearchPipelineCommonModulePlugin' - extendedPlugins = ['lang-painless'] } dependencies { - compileOnly project(':modules:lang-painless') - api project(':libs:opensearch-grok') - api project(':libs:opensearch-dissect') } restResources { restApi { - includeCore '_common', 'search_pipelines', 'cluster', 'indices', 'index', 'nodes', 'get', 'update', 'cat', 'mget', 'search' + includeCore '_common', 'search_pipelines', 'cluster', 'indices', 'index', 'nodes', 'search' } } testClusters.all { - // Needed in order to test ingest pipeline templating: - // (this is because the integTest node is not using default distribution, but only the minimal number of required modules) - module ':modules:lang-mustache' } thirdPartyAudit.ignoreMissingClasses( From 03eef0236567e0c0ac9e856a0c36b6e7cd82906c Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Fri, 17 Mar 2023 18:23:30 +0000 Subject: [PATCH 05/14] Avoid cloning SearchRequest if no SearchRequestProcessors Also, add tests to confirm that a pipeline with no processors works fine (as a no-op). 
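The no-op path is a short-circuit at the top of Pipeline.transformRequest: when a pipeline defines no request processors, the incoming request is returned untouched and the deep copy is skipped. A sketch, abridged from the diff below:

    SearchRequest transformRequest(SearchRequest originalRequest) throws SearchPipelineProcessingException {
        if (searchRequestProcessors.size() == 0) {
            // Nothing will modify the request, so skip the serialize/deserialize clone entirely.
            return originalRequest;
        }
        // ... otherwise clone the request and run it through each SearchRequestProcessor ...
    }
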
Signed-off-by: Michael Froh --- .../search_pipeline/25_empty_pipeline.yml | 47 +++++++++++++++++++ .../test/search_pipeline/30_filter_query.yml | 8 ++++ .../opensearch/search/pipeline/Pipeline.java | 3 ++ 3 files changed, 58 insertions(+) create mode 100644 modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml new file mode 100644 index 0000000000000..597dc4be6923f --- /dev/null +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml @@ -0,0 +1,47 @@ +--- +teardown: + - do: + search_pipelines.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test explicitly empty pipeline is no-op": + - do: + search_pipelines.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description" + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + body: { + "field" : "foo" + } + - do: + index: + index: test + id: 2 + body: { + "field": "value" + } + + - do: + indices.refresh: + index: test + + - do: + search: + index: test + - match: { hits.total.value: 2 } + + - do: + search: + index: test + search_pipeline: "my_pipeline" + - match: { hits.total.value: 2 } diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml index 3e84c36338996..c869f2fe62b1e 100644 --- a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml @@ -58,3 +58,11 @@ teardown: body: { } - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "2" } + + # Should also work with no search body specified + - do: + search: + index: test + search_pipeline: "my_pipeline" + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "2" } diff --git a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java index a30b90b6e1ece..029bcf5408c36 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java +++ b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java @@ -144,6 +144,9 @@ List getSearchResponseProcessors() { } SearchRequest transformRequest(SearchRequest originalRequest) throws SearchPipelineProcessingException { + if (searchRequestProcessors.size() == 0) { + return originalRequest; + } try { // Save the original request by deep cloning the existing request. BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); From 2019fdf58bb23361519b7a085c87c4ed1a7cbf2e Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Fri, 17 Mar 2023 19:42:51 +0000 Subject: [PATCH 06/14] Use NamedWritableRegistry to deserialize SearchRequest Queries are serialized as NamedWritables, so we need to use a NamedWritableRegistry to deserialize. 
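Concretely, the deep clone of the request now wraps the serialized bytes in a NamedWriteableAwareStreamInput backed by the node's NamedWriteableRegistry, so that query builders registered as NamedWriteables can be resolved on read. Condensed from the SearchPipelineService change below:

    // Save the original request by deep cloning it; the registry is needed to
    // look up NamedWriteable implementations such as TermQueryBuilder.
    BytesStreamOutput bytesStreamOutput = new BytesStreamOutput();
    originalRequest.writeTo(bytesStreamOutput);
    StreamInput input = new NamedWriteableAwareStreamInput(
        bytesStreamOutput.bytes().streamInput(),
        namedWriteableRegistry
    );
    SearchRequest request = new SearchRequest(input);
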
Signed-off-by: Michael Froh --- .../test/search_pipeline/30_filter_query.yml | 17 +++++++++++- .../main/java/org/opensearch/node/Node.java | 1 + .../opensearch/search/pipeline/Pipeline.java | 20 +++----------- .../pipeline/SearchPipelineService.java | 26 ++++++++++++++++--- .../pipeline/SearchPipelineServiceTests.java | 20 +++++++++++--- .../snapshots/SnapshotResiliencyTests.java | 1 + 6 files changed, 61 insertions(+), 24 deletions(-) diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml index c869f2fe62b1e..e82824f2d5734 100644 --- a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml @@ -32,7 +32,7 @@ teardown: index: test id: 1 body: { - "field" : "foo" + "field": "foo" } - do: index: @@ -66,3 +66,18 @@ teardown: search_pipeline: "my_pipeline" - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "2" } + + # Should also work with existing query + - do: + search: + index: test + search_pipeline: "my_pipeline" + body: { + "query": { + "term": { + "field": "value" + } + } + } + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "2" } diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 60f0bb1ad606c..fef711dd82064 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -960,6 +960,7 @@ protected Node( scriptService, analysisModule.getAnalysisRegistry(), xContentRegistry, + namedWriteableRegistry, pluginsService.filterPlugins(SearchPipelinePlugin.class), client ); diff --git a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java index 029bcf5408c36..dd7e88c86f1e6 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java +++ b/server/src/main/java/org/opensearch/search/pipeline/Pipeline.java @@ -12,7 +12,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.Nullable; -import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.ingest.ConfigurationUtils; import java.util.ArrayList; @@ -143,22 +142,11 @@ List getSearchResponseProcessors() { return searchResponseProcessors; } - SearchRequest transformRequest(SearchRequest originalRequest) throws SearchPipelineProcessingException { - if (searchRequestProcessors.size() == 0) { - return originalRequest; - } - try { - // Save the original request by deep cloning the existing request. 
- BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); - originalRequest.writeTo(bytesStreamOutput); - SearchRequest request = new SearchRequest(bytesStreamOutput.bytes().streamInput()); - for (SearchRequestProcessor searchRequestProcessor : searchRequestProcessors) { - request = searchRequestProcessor.processRequest(request); - } - return request; - } catch (Exception e) { - throw new SearchPipelineProcessingException(e); + SearchRequest transformRequest(SearchRequest request) throws Exception { + for (SearchRequestProcessor searchRequestProcessor : searchRequestProcessors) { + request = searchRequestProcessor.processRequest(request); } + return request; } SearchResponse transformResponse(SearchRequest request, SearchResponse response) throws SearchPipelineProcessingException { diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index 03d48106bef29..882fa52061eca 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -29,6 +29,10 @@ import org.opensearch.cluster.service.ClusterManagerTaskKeys; import org.opensearch.cluster.service.ClusterManagerTaskThrottler; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentHelper; @@ -69,6 +73,7 @@ public class SearchPipelineService implements ClusterStateApplier, ReportingServ private final List> searchPipelineClusterStateListeners = new CopyOnWriteArrayList<>(); private final ClusterManagerTaskThrottler.ThrottlingKey putPipelineTaskKey; private final ClusterManagerTaskThrottler.ThrottlingKey deletePipelineTaskKey; + private final NamedWriteableRegistry namedWriteableRegistry; private volatile ClusterState state; public SearchPipelineService( @@ -78,12 +83,14 @@ public SearchPipelineService( ScriptService scriptService, AnalysisRegistry analysisRegistry, NamedXContentRegistry namedXContentRegistry, + NamedWriteableRegistry namedWriteableRegistry, List searchPipelinePlugins, Client client ) { this.clusterService = clusterService; this.scriptService = scriptService; this.threadPool = threadPool; + this.namedWriteableRegistry = namedWriteableRegistry; this.processorFactories = processorFactories( searchPipelinePlugins, new Processor.Parameters( @@ -315,16 +322,27 @@ static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterStat return newState.build(); } - public SearchRequest transformRequest(SearchRequest searchRequest) { - String pipelineId = searchRequest.pipeline(); + public SearchRequest transformRequest(SearchRequest originalRequest) { + String pipelineId = originalRequest.pipeline(); if (pipelineId != null) { PipelineHolder pipeline = pipelines.get(pipelineId); if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); } - return pipeline.pipeline.transformRequest(searchRequest); + if (pipeline.pipeline.getSearchRequestProcessors().size() > 0) { + try { + // Save the original request by deep cloning the existing request. 
+ BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); + originalRequest.writeTo(bytesStreamOutput); + StreamInput input = new NamedWriteableAwareStreamInput(bytesStreamOutput.bytes().streamInput(), namedWriteableRegistry); + SearchRequest request = new SearchRequest(input); + return pipeline.pipeline.transformRequest(request); + } catch (Exception e) { + throw new SearchPipelineProcessingException(e); + } + } } - return searchRequest; + return originalRequest; } public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) { diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java index 379d3237ee382..8faa96ea9f3e1 100644 --- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -28,11 +28,15 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.plugins.SearchPipelinePlugin; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; +import org.opensearch.search.SearchModule; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.test.MockLogAppender; import org.opensearch.test.OpenSearchTestCase; @@ -77,6 +81,7 @@ public void testSearchPipelinePlugin() { null, null, this.xContentRegistry(), + this.writableRegistry(), List.of(DUMMY_PLUGIN), client ); @@ -96,6 +101,7 @@ public void testSearchPipelinePluginDuplicate() { null, null, this.xContentRegistry(), + this.writableRegistry(), List.of(DUMMY_PLUGIN, DUMMY_PLUGIN), client ) @@ -112,6 +118,7 @@ public void testExecuteSearchPipelineDoesNotExist() { null, null, this.xContentRegistry(), + this.writableRegistry(), List.of(DUMMY_PLUGIN), client ); @@ -180,7 +187,7 @@ public SearchResponse processResponse(SearchRequest request, SearchResponse resp } } - private static SearchPipelineService createWithProcessors() { + private SearchPipelineService createWithProcessors() { Map processors = new HashMap<>(); processors.put("scale_request_size", (processorFactories, tag, description, config) -> { float scale = ((Number) config.remove("scale")).floatValue(); @@ -198,7 +205,13 @@ private static SearchPipelineService createWithProcessors() { return createWithProcessors(processors); } - private static SearchPipelineService createWithProcessors(Map processors) { + @Override + protected NamedWriteableRegistry writableRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); + return new NamedWriteableRegistry(searchModule.getNamedWriteables()); + } + + private SearchPipelineService createWithProcessors(Map processors) { Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); ExecutorService executorService = OpenSearchExecutors.newDirectExecutorService(); @@ -211,6 +224,7 @@ private static SearchPipelineService createWithProcessors(Map getProcessors(Processor.Parameters parameters) { @@ -421,7 +435,7 @@ public void testTransformRequest() throws 
Exception { searchPipelineService.applyClusterState(new ClusterChangedEvent("", clusterState, previousState)); int size = 10; - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(size); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(new TermQueryBuilder("foo", "bar")).size(size); SearchRequest request = new SearchRequest("_index").source(sourceBuilder).pipeline("p1"); SearchRequest transformedRequest = searchPipelineService.transformRequest(request); diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 01351833d1c3c..7054e315e5352 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -2088,6 +2088,7 @@ public void onFailure(final Exception e) { scriptService, new AnalysisModule(environment, Collections.emptyList()).getAnalysisRegistry(), namedXContentRegistry, + namedWriteableRegistry, List.of(), client ) From 7e32d6a59bd9d594a68942951e00e19684d4f180 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Fri, 17 Mar 2023 22:26:48 +0000 Subject: [PATCH 07/14] Check for empty pipeline with CollectionUtils.isEmpty Signed-off-by: Michael Froh --- .../org/opensearch/search/pipeline/SearchPipelineService.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index 882fa52061eca..b3fde7147be06 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -35,6 +35,7 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.CollectionUtils; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; @@ -329,7 +330,7 @@ public SearchRequest transformRequest(SearchRequest originalRequest) { if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); } - if (pipeline.pipeline.getSearchRequestProcessors().size() > 0) { + if (CollectionUtils.isEmpty(pipeline.pipeline.getSearchRequestProcessors()) == false) { try { // Save the original request by deep cloning the existing request. 
BytesStreamOutput bytesStreamOutput = new BytesStreamOutput(); From ba99f9cb1c92de5f6629e0d1a759056126f24ab1 Mon Sep 17 00:00:00 2001 From: msfroh Date: Mon, 20 Mar 2023 09:37:27 -0700 Subject: [PATCH 08/14] Update server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java Co-authored-by: Navneet Verma Signed-off-by: Michael Froh --- .../org/opensearch/search/pipeline/SearchPipelineService.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index b3fde7147be06..c471591e9e310 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -120,7 +120,7 @@ private static Map processorFactories( Map newProcessors = searchPipelinePlugin.getProcessors(parameters); for (Map.Entry entry : newProcessors.entrySet()) { if (processorFactories.put(entry.getKey(), entry.getValue()) != null) { - throw new IllegalArgumentException("Ingest processor [" + entry.getKey() + "] is already registered"); + throw new IllegalArgumentException("Search processor [" + entry.getKey() + "] is already registered"); } } } From 137904583077784c269c11f8a28d20d7966ca346 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Tue, 28 Mar 2023 21:55:10 +0000 Subject: [PATCH 09/14] Incorporate feedback from @noCharger Signed-off-by: Michael Froh --- modules/search-pipeline-common/build.gradle | 18 ---- .../common/SearchPipelineCommonIT.java | 84 ++++++++++++++++++- .../org/opensearch/OpenSearchException.java | 4 +- .../opensearch/search/pipeline/Processor.java | 25 ------ .../pipeline/SearchRequestProcessor.java | 1 - .../pipeline/SearchResponseProcessor.java | 1 - 6 files changed, 84 insertions(+), 49 deletions(-) diff --git a/modules/search-pipeline-common/build.gradle b/modules/search-pipeline-common/build.gradle index b7f3e17a17e67..33b0a52b3c1a9 100644 --- a/modules/search-pipeline-common/build.gradle +++ b/modules/search-pipeline-common/build.gradle @@ -9,24 +9,6 @@ * GitHub history for details. */ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ apply plugin: 'opensearch.yaml-rest-test' apply plugin: 'opensearch.internal-cluster-test' diff --git a/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java b/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java index e855d0c2ae880..cae6bf8168b12 100644 --- a/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java +++ b/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java @@ -8,6 +8,88 @@ package org.opensearch.search.pipeline.common; +import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; +import org.opensearch.action.admin.indices.refresh.RefreshRequest; +import org.opensearch.action.admin.indices.refresh.RefreshResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.search.DeleteSearchPipelineRequest; +import org.opensearch.action.search.PutSearchPipelineRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.plugins.Plugin; +import org.opensearch.rest.RestStatus; +import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -public class SearchPipelineCommonIT extends OpenSearchIntegTestCase {} +import java.util.Collection; +import java.util.List; +import java.util.Map; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class SearchPipelineCommonIT extends OpenSearchIntegTestCase { + @Override + protected Collection> nodePlugins() { + return List.of(SearchPipelineCommonModulePlugin.class); + } + + public void testFilterQuery() { + // Create a pipeline with a filter_query processor. + String pipelineName = "foo"; + PutSearchPipelineRequest putSearchPipelineRequest = new PutSearchPipelineRequest( + pipelineName, + new BytesArray( + "{" + + "\"request_processors\": [" + + "{" + + "\"filter_query\" : {" + + "\"query\": {" + + "\"term\" : {" + + "\"field\" : \"value\"" + + "}" + + "}" + + "}" + + "}" + + "]" + + "}" + ), + XContentType.JSON + ); + AcknowledgedResponse ackRsp = client().admin().cluster().putSearchPipeline(putSearchPipelineRequest).actionGet(); + assertTrue(ackRsp.isAcknowledged()); + + // Index some documents. + String indexName = "myindex"; + IndexRequest doc1 = new IndexRequest(indexName).id("doc1").source(Map.of("field", "value")); + IndexRequest doc2 = new IndexRequest(indexName).id("doc2").source(Map.of("field", "something else")); + + IndexResponse ir = client().index(doc1).actionGet(); + assertSame(RestStatus.CREATED, ir.status()); + ir = client().index(doc2).actionGet(); + assertSame(RestStatus.CREATED, ir.status()); + + // Refresh so the documents are visible to search. + RefreshResponse refRsp = client().admin().indices().refresh(new RefreshRequest(indexName)).actionGet(); + assertSame(RestStatus.OK, refRsp.getStatus()); + + // Search without the pipeline. Should see both documents. 
+ SearchRequest req = new SearchRequest(indexName).source(new SearchSourceBuilder().query(new MatchAllQueryBuilder())); + SearchResponse rsp = client().search(req).actionGet(); + assertEquals(2, rsp.getHits().getTotalHits().value); + + // Search with the pipeline. Should only see document with "field":"value". + req.pipeline(pipelineName); + rsp = client().search(req).actionGet(); + assertEquals(1, rsp.getHits().getTotalHits().value); + + // Clean up. + ackRsp = client().admin().cluster().deleteSearchPipeline(new DeleteSearchPipelineRequest(pipelineName)).actionGet(); + assertTrue(ackRsp.isAcknowledged()); + ackRsp = client().admin().indices().delete(new DeleteIndexRequest(indexName)).actionGet(); + assertTrue(ackRsp.isAcknowledged()); + } +} diff --git a/server/src/main/java/org/opensearch/OpenSearchException.java b/server/src/main/java/org/opensearch/OpenSearchException.java index 68c6d01bcc028..2d94428a35140 100644 --- a/server/src/main/java/org/opensearch/OpenSearchException.java +++ b/server/src/main/java/org/opensearch/OpenSearchException.java @@ -1654,9 +1654,7 @@ private enum OpenSearchExceptionHandle { org.opensearch.cluster.block.IndexCreateBlockException::new, CUSTOM_ELASTICSEARCH_EXCEPTIONS_BASE_ID + 1, V_3_0_0 - ), - - ; + ); final Class exceptionClass; final CheckedFunction constructor; diff --git a/server/src/main/java/org/opensearch/search/pipeline/Processor.java b/server/src/main/java/org/opensearch/search/pipeline/Processor.java index 490c62b12b6a3..44f268242b83c 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/Processor.java +++ b/server/src/main/java/org/opensearch/search/pipeline/Processor.java @@ -6,30 +6,6 @@ * compatible open source license. */ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - package org.opensearch.search.pipeline; import org.opensearch.client.Client; @@ -52,7 +28,6 @@ *

* Processors may get called concurrently and thus need to be thread-safe. * - * * TODO: Refactor {@link org.opensearch.ingest.Processor} to extend this interface, and specialize to IngestProcessor. * * @opensearch.internal diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java b/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java index db20ddf596ffb..c236cde1a5cc0 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java @@ -15,5 +15,4 @@ */ public interface SearchRequestProcessor extends Processor { SearchRequest processRequest(SearchRequest request) throws Exception; - } diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java b/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java index b3bc5e98453b9..2f22cedb9b5c0 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchResponseProcessor.java @@ -16,5 +16,4 @@ */ public interface SearchResponseProcessor extends Processor { SearchResponse processResponse(SearchRequest request, SearchResponse response) throws Exception; - } From 0a2203ac633383b77225c0af0145ead51a96ec22 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Fri, 31 Mar 2023 22:35:35 +0000 Subject: [PATCH 10/14] Incorporate feedback from @reta - Renamed various classes from "SearchPipelinesSomething" to "SearchPipelineSomething" to be consistent. - Refactored NodeInfo construction in NodeService to avoid ternary operator and improved readability. Signed-off-by: Michael Froh --- .../client/RestHighLevelClient.java | 6 +- ...sClient.java => SearchPipelineClient.java} | 20 ++- ...entIT.java => SearchPipelineClientIT.java} | 14 +- modules/search-pipeline-common/build.gradle | 2 +- .../test/search_pipeline/20_crud.yml | 12 +- .../search_pipeline/25_empty_pipeline.yml | 4 +- .../test/search_pipeline/30_filter_query.yml | 4 +- ...eline.json => search_pipeline.delete.json} | 2 +- ...pipeline.json => search_pipeline.get.json} | 2 +- ...pipeline.json => search_pipeline.put.json} | 2 +- .../10_basic.yml | 40 +++--- .../admin/cluster/node/info/NodeInfo.java | 124 +++++++++++++++++- .../cluster/node/info/NodesInfoResponse.java | 6 +- .../PutSearchPipelineTransportAction.java | 8 +- .../java/org/opensearch/node/NodeService.java | 57 +++++--- ...linesInfo.java => SearchPipelineInfo.java} | 8 +- .../pipeline/SearchPipelineService.java | 16 +-- .../nodesinfo/NodeInfoStreamingTests.java | 8 +- .../pipeline/SearchPipelineServiceTests.java | 17 +-- 19 files changed, 239 insertions(+), 113 deletions(-) rename client/rest-high-level/src/main/java/org/opensearch/client/{SearchPipelinesClient.java => SearchPipelineClient.java} (86%) rename client/rest-high-level/src/test/java/org/opensearch/client/{SearchPipelinesClientIT.java => SearchPipelineClientIT.java} (89%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{search_pipelines.delete_pipeline.json => search_pipeline.delete.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{search_pipelines.get_pipeline.json => search_pipeline.get.json} (95%) rename rest-api-spec/src/main/resources/rest-api-spec/api/{search_pipelines.put_pipeline.json => search_pipeline.put.json} (96%) rename rest-api-spec/src/main/resources/rest-api-spec/test/{search_pipelines => 
search_pipeline}/10_basic.yml (80%) rename server/src/main/java/org/opensearch/search/pipeline/{SearchPipelinesInfo.java => SearchPipelineInfo.java} (88%) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index d0d823575b563..acab91f26978e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -268,7 +268,7 @@ public class RestHighLevelClient implements Closeable { private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); private final TasksClient tasksClient = new TasksClient(this); - private final SearchPipelinesClient searchPipelinesClient = new SearchPipelinesClient(this); + private final SearchPipelineClient searchPipelineClient = new SearchPipelineClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -355,8 +355,8 @@ public final TasksClient tasks() { return tasksClient; } - public final SearchPipelinesClient searchPipelines() { - return searchPipelinesClient; + public final SearchPipelineClient searchPipeline() { + return searchPipelineClient; } /** diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java similarity index 86% rename from client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java rename to client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java index d6ec367194c99..9e65faa143be2 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java @@ -20,10 +20,10 @@ import static java.util.Collections.emptySet; -public final class SearchPipelinesClient { +public final class SearchPipelineClient { private final RestHighLevelClient restHighLevelClient; - SearchPipelinesClient(RestHighLevelClient restHighLevelClient) { + SearchPipelineClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; } @@ -35,7 +35,7 @@ public final class SearchPipelinesClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public AcknowledgedResponse putPipeline(PutSearchPipelineRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse put(PutSearchPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, SearchPipelinesRequestConverters::putPipeline, @@ -53,11 +53,7 @@ public AcknowledgedResponse putPipeline(PutSearchPipelineRequest request, Reques * @param listener the listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable putPipelineAsync( - PutSearchPipelineRequest request, - RequestOptions options, - ActionListener listener - ) { + public Cancellable putAsync(PutSearchPipelineRequest request, RequestOptions options, ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity( request, SearchPipelinesRequestConverters::putPipeline, @@ -76,7 
+72,7 @@ public Cancellable putPipelineAsync( * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public GetSearchPipelineResponse getPipeline(GetSearchPipelineRequest request, RequestOptions options) throws IOException { + public GetSearchPipelineResponse get(GetSearchPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, SearchPipelinesRequestConverters::getPipeline, @@ -94,7 +90,7 @@ public GetSearchPipelineResponse getPipeline(GetSearchPipelineRequest request, R * @param listener the listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable getPipelineAsync( + public Cancellable getAsync( GetSearchPipelineRequest request, RequestOptions options, ActionListener listener @@ -117,7 +113,7 @@ public Cancellable getPipelineAsync( * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public AcknowledgedResponse deletePipeline(DeleteSearchPipelineRequest request, RequestOptions options) throws IOException { + public AcknowledgedResponse delete(DeleteSearchPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, SearchPipelinesRequestConverters::deletePipeline, @@ -135,7 +131,7 @@ public AcknowledgedResponse deletePipeline(DeleteSearchPipelineRequest request, * @param listener the listener to be notified upon request completion * @return cancellable that may be used to cancel the request */ - public Cancellable deletePipelineAsync( + public Cancellable deleteAsync( DeleteSearchPipelineRequest request, RequestOptions options, ActionListener listener diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelineClientIT.java similarity index 89% rename from client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java rename to client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelineClientIT.java index 383d759c7107f..ea27d8e9605d9 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelinesClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchPipelineClientIT.java @@ -19,7 +19,7 @@ import java.io.IOException; -public class SearchPipelinesClientIT extends OpenSearchRestHighLevelClientTestCase { +public class SearchPipelineClientIT extends OpenSearchRestHighLevelClientTestCase { public void testPutPipeline() throws IOException { String id = "some_pipeline_id"; @@ -35,8 +35,8 @@ public void testPutPipeline() throws IOException { private static void createPipeline(PutSearchPipelineRequest request) throws IOException { AcknowledgedResponse response = execute( request, - highLevelClient().searchPipelines()::putPipeline, - highLevelClient().searchPipelines()::putPipelineAsync + highLevelClient().searchPipeline()::put, + highLevelClient().searchPipeline()::putAsync ); assertTrue(response.isAcknowledged()); } @@ -54,8 +54,8 @@ public void testGetPipeline() throws IOException { GetSearchPipelineRequest getRequest = new GetSearchPipelineRequest(id); GetSearchPipelineResponse response = execute( getRequest, - highLevelClient().searchPipelines()::getPipeline, - highLevelClient().searchPipelines()::getPipelineAsync + 
highLevelClient().searchPipeline()::get, + highLevelClient().searchPipeline()::getAsync ); assertTrue(response.isFound()); assertEquals(1, response.pipelines().size()); @@ -75,8 +75,8 @@ public void testDeletePipeline() throws IOException { DeleteSearchPipelineRequest deleteRequest = new DeleteSearchPipelineRequest(id); AcknowledgedResponse response = execute( deleteRequest, - highLevelClient().searchPipelines()::deletePipeline, - highLevelClient().searchPipelines()::deletePipelineAsync + highLevelClient().searchPipeline()::delete, + highLevelClient().searchPipeline()::deleteAsync ); assertTrue(response.isAcknowledged()); } diff --git a/modules/search-pipeline-common/build.gradle b/modules/search-pipeline-common/build.gradle index 33b0a52b3c1a9..9e1cc19196912 100644 --- a/modules/search-pipeline-common/build.gradle +++ b/modules/search-pipeline-common/build.gradle @@ -22,7 +22,7 @@ dependencies { restResources { restApi { - includeCore '_common', 'search_pipelines', 'cluster', 'indices', 'index', 'nodes', 'search' + includeCore '_common', 'search_pipeline', 'cluster', 'indices', 'index', 'nodes', 'search' } } diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml index cffb1c326462c..9b86f6dcc06c2 100644 --- a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/20_crud.yml @@ -1,14 +1,14 @@ --- teardown: - do: - search_pipelines.delete_pipeline: + search_pipeline.delete: id: "my_pipeline" ignore: 404 --- "Test basic pipeline crud": - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -28,20 +28,20 @@ teardown: - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - match: { my_pipeline.description: "_description" } - do: - search_pipelines.get_pipeline: {} + search_pipeline.get: {} - match: { my_pipeline.description: "_description" } - do: - search_pipelines.delete_pipeline: + search_pipeline.delete: id: "my_pipeline" - match: { acknowledged: true } - do: catch: missing - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml index 597dc4be6923f..0481d27bb3f4b 100644 --- a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/25_empty_pipeline.yml @@ -1,14 +1,14 @@ --- teardown: - do: - search_pipelines.delete_pipeline: + search_pipeline.delete: id: "my_pipeline" ignore: 404 --- "Test explicitly empty pipeline is no-op": - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { diff --git a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml index e82824f2d5734..9b59122ba0af8 100644 --- 
a/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml +++ b/modules/search-pipeline-common/src/yamlRestTest/resources/rest-api-spec/test/search_pipeline/30_filter_query.yml @@ -1,14 +1,14 @@ --- teardown: - do: - search_pipelines.delete_pipeline: + search_pipeline.delete: id: "my_pipeline" ignore: 404 --- "Test filter_query processor": - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.delete.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.delete.json index 475d83807b1c0..1fa7060b974dc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.delete_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.delete.json @@ -1,5 +1,5 @@ { - "search_pipelines.delete_pipeline": { + "search_pipeline.delete": { "documentation": { "description": "Deletes a search pipeline.", "url": "https://opensearch.org/docs/latest/opensearch/rest-api/search_pipelines/" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.get.json similarity index 95% rename from rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.get.json index 93f9cb76f9da1..7cac6e7aa4bcf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.get_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.get.json @@ -1,5 +1,5 @@ { - "search_pipelines.get_pipeline": { + "search_pipeline.get": { "documentation": { "description": "Returns a search pipeline", "url": "https://opensearch.org/docs/latest/opensearch/rest-api/search_pipelines/" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.put.json similarity index 96% rename from rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.put.json index 8bddc5bbd61af..b7375d36825a2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipelines.put_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_pipeline.put.json @@ -1,5 +1,5 @@ { - "search_pipelines.put_pipeline": { + "search_pipeline.put": { "documentation": { "description": "Creates or updates a search pipeline.", "url": "https://opensearch.org/docs/latest/opensearch/rest-api/search_pipelines/" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipeline/10_basic.yml similarity index 80% rename from rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml rename to rest-api-spec/src/main/resources/rest-api-spec/test/search_pipeline/10_basic.yml index a58d39b418901..415501ded532e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipelines/10_basic.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search_pipeline/10_basic.yml @@ -4,7 +4,7 @@ version: " - 2.99.99" reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -15,18 +15,18 @@ - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - match: { my_pipeline.description: "_description" } - do: - search_pipelines.delete_pipeline: + search_pipeline.delete: id: "my_pipeline" - match: { acknowledged: true } - do: catch: missing - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" --- @@ -35,7 +35,7 @@ version: " - 2.99.99" reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -45,13 +45,13 @@ - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - match: { my_pipeline.version: 10 } # Lower version - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -61,13 +61,13 @@ - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - match: { my_pipeline.version: 9 } # Higher version - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -77,13 +77,13 @@ - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - match: { my_pipeline.version: 6789 } # No version - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -92,13 +92,13 @@ - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - is_false: my_pipeline.version # Coming back with a version - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { @@ -108,19 +108,19 @@ - match: { acknowledged: true } - do: - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" - match: { my_pipeline.version: 5385 } # Able to delete the versioned pipeline - do: - search_pipelines.delete_pipeline: + search_pipeline.delete: id: "my_pipeline" - match: { acknowledged: true } - do: catch: missing - search_pipelines.get_pipeline: + search_pipeline.get: id: "my_pipeline" --- "Test Get All Pipelines": @@ -128,7 +128,7 @@ version: " - 2.99.99" reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "first_pipeline" body: > { @@ -136,7 +136,7 @@ "request_processors": [] } - do: - search_pipelines.put_pipeline: + search_pipeline.put: id: "second_pipeline" body: > { @@ -145,7 +145,7 @@ } - do: - search_pipelines.get_pipeline: {} + search_pipeline.get: {} - match: { first_pipeline.description: "first" } - match: { second_pipeline.description: "second" } @@ -156,7 +156,7 @@ reason: "to be introduced in future release / TODO: change if/when we backport to 2.x" - do: catch: /parse_exception/ - search_pipelines.put_pipeline: + search_pipeline.put: id: "my_pipeline" body: > { diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java index 0e263b69a69df..af4e27012f631 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java @@ -48,7 +48,7 @@ import org.opensearch.monitor.process.ProcessInfo; import org.opensearch.node.ReportingService; import org.opensearch.search.aggregations.support.AggregationInfo; -import org.opensearch.search.pipeline.SearchPipelinesInfo; +import org.opensearch.search.pipeline.SearchPipelineInfo; import org.opensearch.threadpool.ThreadPoolInfo; import org.opensearch.transport.TransportInfo; @@ -101,7 +101,7 @@ public NodeInfo(StreamInput in) throws IOException { addInfoIfNonNull(IngestInfo.class, in.readOptionalWriteable(IngestInfo::new)); addInfoIfNonNull(AggregationInfo.class, in.readOptionalWriteable(AggregationInfo::new)); if (in.getVersion().onOrAfter(Version.V_3_0_0)) { // TODO: Change if/when we backport to 2.x - addInfoIfNonNull(SearchPipelinesInfo.class, in.readOptionalWriteable(SearchPipelinesInfo::new)); + addInfoIfNonNull(SearchPipelineInfo.class, in.readOptionalWriteable(SearchPipelineInfo::new)); } } @@ -120,7 +120,7 @@ public NodeInfo( @Nullable IngestInfo ingest, @Nullable AggregationInfo aggsInfo, @Nullable ByteSizeValue totalIndexingBuffer, - @Nullable SearchPipelinesInfo searchPipelinesInfo + @Nullable SearchPipelineInfo searchPipelineInfo ) { super(node); this.version = version; @@ -135,7 +135,7 @@ public NodeInfo( addInfoIfNonNull(PluginsAndModules.class, plugins); addInfoIfNonNull(IngestInfo.class, ingest); addInfoIfNonNull(AggregationInfo.class, aggsInfo); - addInfoIfNonNull(SearchPipelinesInfo.class, searchPipelinesInfo); + addInfoIfNonNull(SearchPipelineInfo.class, searchPipelineInfo); this.totalIndexingBuffer = totalIndexingBuffer; } @@ -225,7 +225,121 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(getInfo(IngestInfo.class)); out.writeOptionalWriteable(getInfo(AggregationInfo.class)); if (out.getVersion().onOrAfter(Version.V_3_0_0)) { // TODO: Change if/when we backport to 2.x - out.writeOptionalWriteable(getInfo(SearchPipelinesInfo.class)); + out.writeOptionalWriteable(getInfo(SearchPipelineInfo.class)); } } + + public static NodeInfo.Builder builder(Version version, Build build, DiscoveryNode node) { + return new Builder(version, build, node); + } + + /** + * Builder class to accommodate new Info types being added to NodeInfo. 
+ */ + public static class Builder { + private final Version version; + private final Build build; + private final DiscoveryNode node; + + private Builder(Version version, Build build, DiscoveryNode node) { + this.version = version; + this.build = build; + this.node = node; + } + + private Settings settings; + private OsInfo os; + private ProcessInfo process; + private JvmInfo jvm; + private ThreadPoolInfo threadPool; + private TransportInfo transport; + private HttpInfo http; + private PluginsAndModules plugins; + private IngestInfo ingest; + private AggregationInfo aggsInfo; + private ByteSizeValue totalIndexingBuffer; + private SearchPipelineInfo searchPipelineInfo; + + public Builder setSettings(Settings settings) { + this.settings = settings; + return this; + } + + public Builder setOs(OsInfo os) { + this.os = os; + return this; + } + + public Builder setProcess(ProcessInfo process) { + this.process = process; + return this; + } + + public Builder setJvm(JvmInfo jvm) { + this.jvm = jvm; + return this; + } + + public Builder setThreadPool(ThreadPoolInfo threadPool) { + this.threadPool = threadPool; + return this; + } + + public Builder setTransport(TransportInfo transport) { + this.transport = transport; + return this; + } + + public Builder setHttp(HttpInfo http) { + this.http = http; + return this; + } + + public Builder setPlugins(PluginsAndModules plugins) { + this.plugins = plugins; + return this; + } + + public Builder setIngest(IngestInfo ingest) { + this.ingest = ingest; + return this; + } + + public Builder setAggsInfo(AggregationInfo aggsInfo) { + this.aggsInfo = aggsInfo; + return this; + } + + public Builder setTotalIndexingBuffer(ByteSizeValue totalIndexingBuffer) { + this.totalIndexingBuffer = totalIndexingBuffer; + return this; + } + + public Builder setSearchPipelineInfo(SearchPipelineInfo searchPipelineInfo) { + this.searchPipelineInfo = searchPipelineInfo; + return this; + } + + public NodeInfo build() { + return new NodeInfo( + version, + build, + node, + settings, + os, + process, + jvm, + threadPool, + transport, + http, + plugins, + ingest, + aggsInfo, + totalIndexingBuffer, + searchPipelineInfo + ); + } + + } + } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java index 93ecbac1bdcd9..82f7d58c0ba25 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -49,7 +49,7 @@ import org.opensearch.monitor.os.OsInfo; import org.opensearch.monitor.process.ProcessInfo; import org.opensearch.search.aggregations.support.AggregationInfo; -import org.opensearch.search.pipeline.SearchPipelinesInfo; +import org.opensearch.search.pipeline.SearchPipelineInfo; import org.opensearch.threadpool.ThreadPoolInfo; import org.opensearch.transport.TransportInfo; @@ -148,8 +148,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (nodeInfo.getInfo(AggregationInfo.class) != null) { nodeInfo.getInfo(AggregationInfo.class).toXContent(builder, params); } - if (nodeInfo.getInfo(SearchPipelinesInfo.class) != null) { - nodeInfo.getInfo(SearchPipelinesInfo.class).toXContent(builder, params); + if (nodeInfo.getInfo(SearchPipelineInfo.class) != null) { + nodeInfo.getInfo(SearchPipelineInfo.class).toXContent(builder, params); } builder.endObject(); diff --git 
a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java index 342a1f4cb26c6..2c0466dc939e7 100644 --- a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java +++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java @@ -24,7 +24,7 @@ import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.search.pipeline.SearchPipelineService; -import org.opensearch.search.pipeline.SearchPipelinesInfo; +import org.opensearch.search.pipeline.SearchPipelineInfo; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -84,11 +84,11 @@ protected void clusterManagerOperation( ) throws Exception { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); client.admin().cluster().nodesInfo(nodesInfoRequest, ActionListener.wrap(nodeInfos -> { - Map searchPipelinesInfos = new HashMap<>(); + Map searchPipelineInfos = new HashMap<>(); for (NodeInfo nodeInfo : nodeInfos.getNodes()) { - searchPipelinesInfos.put(nodeInfo.getNode(), nodeInfo.getInfo(SearchPipelinesInfo.class)); + searchPipelineInfos.put(nodeInfo.getNode(), nodeInfo.getInfo(SearchPipelineInfo.class)); } - searchPipelineService.putPipeline(searchPipelinesInfos, request, listener); + searchPipelineService.putPipeline(searchPipelineInfos, request, listener); }, listener::onFailure)); } diff --git a/server/src/main/java/org/opensearch/node/NodeService.java b/server/src/main/java/org/opensearch/node/NodeService.java index 767e257523819..34293efd73655 100644 --- a/server/src/main/java/org/opensearch/node/NodeService.java +++ b/server/src/main/java/org/opensearch/node/NodeService.java @@ -149,25 +149,46 @@ public NodeInfo info( boolean ingest, boolean aggs, boolean indices, - boolean searchPipelines + boolean searchPipeline ) { - return new NodeInfo( - Version.CURRENT, - Build.CURRENT, - transportService.getLocalNode(), - settings ? settingsFilter.filter(this.settings) : null, - os ? monitorService.osService().info() : null, - process ? monitorService.processService().info() : null, - jvm ? monitorService.jvmService().info() : null, - threadPool ? this.threadPool.info() : null, - transport ? transportService.info() : null, - http ? (httpServerTransport == null ? null : httpServerTransport.info()) : null, - plugin ? (pluginService == null ? null : pluginService.info()) : null, - ingest ? (ingestService == null ? null : ingestService.info()) : null, - aggs ? (aggregationUsageService == null ? null : aggregationUsageService.info()) : null, - indices ? indicesService.getTotalIndexingBufferBytes() : null, - searchPipelines ? (searchPipelineService == null ? 
null : searchPipelineService.info()) : null - ); + NodeInfo.Builder builder = NodeInfo.builder(Version.CURRENT, Build.CURRENT, transportService.getLocalNode()); + if (settings) { + builder.setSettings(settingsFilter.filter(this.settings)); + } + if (os) { + builder.setOs(monitorService.osService().info()); + } + if (process) { + builder.setProcess(monitorService.processService().info()); + } + if (jvm) { + builder.setJvm(monitorService.jvmService().info()); + } + if (threadPool) { + builder.setThreadPool(this.threadPool.info()); + } + if (transport) { + builder.setTransport(transportService.info()); + } + if (http && httpServerTransport != null) { + builder.setHttp(httpServerTransport.info()); + } + if (plugin && pluginService != null) { + builder.setPlugins(pluginService.info()); + } + if (ingest && ingestService != null) { + builder.setIngest(ingestService.info()); + } + if (aggs && aggregationUsageService != null) { + builder.setAggsInfo(aggregationUsageService.info()); + } + if (indices) { + builder.setTotalIndexingBuffer(indicesService.getTotalIndexingBufferBytes()); + } + if (searchPipeline && searchPipelineService != null) { + builder.setSearchPipelineInfo(searchPipelineService.info()); + } + return builder.build(); } public NodeStats stats( diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineInfo.java similarity index 88% rename from server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java rename to server/src/main/java/org/opensearch/search/pipeline/SearchPipelineInfo.java index 0f4a830b6e5ef..95d1e3720cbb3 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelinesInfo.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineInfo.java @@ -24,18 +24,18 @@ * * @opensearch.internal */ -public class SearchPipelinesInfo implements ReportingService.Info { +public class SearchPipelineInfo implements ReportingService.Info { private final Set processors; - public SearchPipelinesInfo(List processors) { + public SearchPipelineInfo(List processors) { this.processors = new TreeSet<>(processors); // we use a treeset here to have a test-able / predictable order } /** * Read from a stream. */ - public SearchPipelinesInfo(StreamInput in) throws IOException { + public SearchPipelineInfo(StreamInput in) throws IOException { processors = new TreeSet<>(); final int size = in.readVInt(); for (int i = 0; i < size; i++) { @@ -71,7 +71,7 @@ public boolean containsProcessor(String type) { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - SearchPipelinesInfo that = (SearchPipelinesInfo) o; + SearchPipelineInfo that = (SearchPipelineInfo) o; return Objects.equals(processors, that.processors); } diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index c471591e9e310..27b623b791b86 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -62,7 +62,7 @@ * The main entry point for search pipelines. Handles CRUD operations and exposes the API to execute search pipelines * against requests and responses. 
*/ -public class SearchPipelineService implements ClusterStateApplier, ReportingService { +public class SearchPipelineService implements ClusterStateApplier, ReportingService { public static final String SEARCH_PIPELINE_ORIGIN = "search_pipeline"; private static final Logger logger = LogManager.getLogger(SearchPipelineService.class); @@ -206,12 +206,12 @@ void innerUpdatePipelines(SearchPipelineMetadata newSearchPipelineMetadata) { } public void putPipeline( - Map searchPipelinesInfos, + Map searchPipelineInfos, PutSearchPipelineRequest request, ActionListener listener ) throws Exception { - validatePipeline(searchPipelinesInfos, request); + validatePipeline(searchPipelineInfos, request); clusterService.submitStateUpdateTask( "put-search-pipeline-" + request.getId(), new AckedClusterStateUpdateTask<>(request, listener) { @@ -251,15 +251,15 @@ static ClusterState innerPut(PutSearchPipelineRequest request, ClusterState curr return newState.build(); } - void validatePipeline(Map searchPipelinesInfos, PutSearchPipelineRequest request) throws Exception { - if (searchPipelinesInfos.isEmpty()) { + void validatePipeline(Map searchPipelineInfos, PutSearchPipelineRequest request) throws Exception { + if (searchPipelineInfos.isEmpty()) { throw new IllegalStateException("Search pipeline info is empty"); } Map pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); Pipeline pipeline = Pipeline.create(request.getId(), pipelineConfig, processorFactories); List exceptions = new ArrayList<>(); for (Processor processor : pipeline.flattenAllProcessors()) { - for (Map.Entry entry : searchPipelinesInfos.entrySet()) { + for (Map.Entry entry : searchPipelineInfos.entrySet()) { String type = processor.getType(); if (entry.getValue().containsProcessor(type) == false) { String message = "Processor type [" + processor.getType() + "] is not installed on node [" + entry.getKey() + "]"; @@ -363,12 +363,12 @@ Map getProcessorFactories() { } @Override - public SearchPipelinesInfo info() { + public SearchPipelineInfo info() { List processorInfoList = new ArrayList<>(); for (Map.Entry entry : processorFactories.entrySet()) { processorInfoList.add(new ProcessorInfo(entry.getKey())); } - return new SearchPipelinesInfo(processorInfoList); + return new SearchPipelineInfo(processorInfoList); } public static List getPipelines(ClusterState clusterState, String... 
ids) { diff --git a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java index 341f52f28e5e0..bec921bc5bf5d 100644 --- a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java @@ -54,7 +54,7 @@ import org.opensearch.plugins.PluginInfo; import org.opensearch.search.aggregations.support.AggregationInfo; import org.opensearch.search.aggregations.support.AggregationUsageService; -import org.opensearch.search.pipeline.SearchPipelinesInfo; +import org.opensearch.search.pipeline.SearchPipelineInfo; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.ThreadPool; @@ -244,14 +244,14 @@ private static NodeInfo createNodeInfo() { indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L << 40) - 1)); } - SearchPipelinesInfo searchPipelinesInfo = null; + SearchPipelineInfo searchPipelineInfo = null; if (randomBoolean()) { int numProcessors = randomIntBetween(0, 5); List processors = new ArrayList<>(numProcessors); for (int i = 0; i < numProcessors; i++) { processors.add(new org.opensearch.search.pipeline.ProcessorInfo(randomAlphaOfLengthBetween(3, 10))); } - searchPipelinesInfo = new SearchPipelinesInfo(processors); + searchPipelineInfo = new SearchPipelineInfo(processors); } return new NodeInfo( @@ -269,7 +269,7 @@ private static NodeInfo createNodeInfo() { ingestInfo, aggregationInfo, indexingBuffer, - searchPipelinesInfo + searchPipelineInfo ); } } diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java index 8faa96ea9f3e1..99c6ce972de23 100644 --- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -576,12 +576,7 @@ public void testValidatePipeline() throws Exception { expectThrows( OpenSearchParseException.class, () -> searchPipelineService.validatePipeline( - Map.of( - n1, - new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)), - n2, - new SearchPipelinesInfo(List.of(reqProcessor)) - ), + Map.of(n1, new SearchPipelineInfo(List.of(reqProcessor, rspProcessor)), n2, new SearchPipelineInfo(List.of(reqProcessor))), putRequest ) ); @@ -605,9 +600,9 @@ public void testValidatePipeline() throws Exception { () -> searchPipelineService.validatePipeline( Map.of( n1, - new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)), + new SearchPipelineInfo(List.of(reqProcessor, rspProcessor)), n2, - new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)) + new SearchPipelineInfo(List.of(reqProcessor, rspProcessor)) ), badPutRequest ) @@ -617,9 +612,9 @@ public void testValidatePipeline() throws Exception { searchPipelineService.validatePipeline( Map.of( n1, - new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)), + new SearchPipelineInfo(List.of(reqProcessor, rspProcessor)), n2, - new SearchPipelinesInfo(List.of(reqProcessor, rspProcessor)) + new SearchPipelineInfo(List.of(reqProcessor, rspProcessor)) ), putRequest ); @@ -627,7 +622,7 @@ public void testValidatePipeline() throws Exception { public void testInfo() { SearchPipelineService searchPipelineService = createWithProcessors(); - SearchPipelinesInfo info = searchPipelineService.info(); + SearchPipelineInfo info = 
searchPipelineService.info(); assertTrue(info.containsProcessor("scale_request_size")); assertTrue(info.containsProcessor("fixed_score")); } From 13a0419a92d11b62d6853448764a6320c3d4d75e Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Tue, 4 Apr 2023 21:31:55 +0000 Subject: [PATCH 11/14] Gate search pipelines behind a feature flag Also renamed SearchPipelinesRequestConverters. Signed-off-by: Michael Froh --- client/rest-high-level/build.gradle | 3 +++ .../opensearch/client/SearchPipelineClient.java | 12 ++++++------ ....java => SearchPipelineRequestConverters.java} | 4 ++-- distribution/src/config/opensearch.yml | 5 +++++ modules/search-pipeline-common/build.gradle | 1 + .../pipeline/common/SearchPipelineCommonIT.java | 8 ++++++++ rest-api-spec/build.gradle | 1 + .../common/settings/FeatureFlagSettings.java | 3 ++- .../org/opensearch/common/util/FeatureFlags.java | 8 ++++++++ .../search/pipeline/SearchPipelineService.java | 9 +++++++-- .../pipeline/SearchPipelineServiceTests.java | 15 +++++++++++++++ 11 files changed, 58 insertions(+), 11 deletions(-) rename client/rest-high-level/src/main/java/org/opensearch/client/{SearchPipelinesRequestConverters.java => SearchPipelineRequestConverters.java} (96%) diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 7fa2855d85487..43758560b2a15 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -103,6 +103,9 @@ testClusters.all { extraConfigFile nodeCert.name, nodeCert extraConfigFile nodeTrustStore.name, nodeTrustStore extraConfigFile pkiTrustCert.name, pkiTrustCert + + // Enable APIs behind feature flags + setting 'opensearch.experimental.feature.search_pipeline.enabled', 'true' } thirdPartyAudit.ignoreMissingClasses( diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java index 9e65faa143be2..b6c28f57d6bd9 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineClient.java @@ -38,7 +38,7 @@ public final class SearchPipelineClient { public AcknowledgedResponse put(PutSearchPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, - SearchPipelinesRequestConverters::putPipeline, + SearchPipelineRequestConverters::putPipeline, options, AcknowledgedResponse::fromXContent, emptySet() @@ -56,7 +56,7 @@ public AcknowledgedResponse put(PutSearchPipelineRequest request, RequestOptions public Cancellable putAsync(PutSearchPipelineRequest request, RequestOptions options, ActionListener listener) { return restHighLevelClient.performRequestAsyncAndParseEntity( request, - SearchPipelinesRequestConverters::putPipeline, + SearchPipelineRequestConverters::putPipeline, options, AcknowledgedResponse::fromXContent, listener, @@ -75,7 +75,7 @@ public Cancellable putAsync(PutSearchPipelineRequest request, RequestOptions opt public GetSearchPipelineResponse get(GetSearchPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, - SearchPipelinesRequestConverters::getPipeline, + SearchPipelineRequestConverters::getPipeline, options, GetSearchPipelineResponse::fromXContent, Collections.singleton(404) @@ -97,7 +97,7 @@ public Cancellable getAsync( ) { return 
restHighLevelClient.performRequestAsyncAndParseEntity( request, - SearchPipelinesRequestConverters::getPipeline, + SearchPipelineRequestConverters::getPipeline, options, GetSearchPipelineResponse::fromXContent, listener, @@ -116,7 +116,7 @@ public Cancellable getAsync( public AcknowledgedResponse delete(DeleteSearchPipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, - SearchPipelinesRequestConverters::deletePipeline, + SearchPipelineRequestConverters::deletePipeline, options, AcknowledgedResponse::fromXContent, emptySet() @@ -138,7 +138,7 @@ public Cancellable deleteAsync( ) { return restHighLevelClient.performRequestAsyncAndParseEntity( request, - SearchPipelinesRequestConverters::deletePipeline, + SearchPipelineRequestConverters::deletePipeline, options, AcknowledgedResponse::fromXContent, listener, diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineRequestConverters.java similarity index 96% rename from client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java rename to client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineRequestConverters.java index 53df19d2115d5..3186a59a1118c 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelinesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SearchPipelineRequestConverters.java @@ -17,8 +17,8 @@ import java.io.IOException; -final class SearchPipelinesRequestConverters { - private SearchPipelinesRequestConverters() {} +final class SearchPipelineRequestConverters { + private SearchPipelineRequestConverters() {} static Request putPipeline(PutSearchPipelineRequest putPipelineRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_search/pipeline") diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml index 8ac10b9f677b8..7fc1e74ad6337 100644 --- a/distribution/src/config/opensearch.yml +++ b/distribution/src/config/opensearch.yml @@ -114,3 +114,8 @@ ${path.logs} # the core. # #opensearch.experimental.feature.extensions.enabled: false +# +# +# Gates the search pipeline feature. This feature enables configurable processors +# for search requests and search responses, similar to ingest pipelines. 
+#opensearch.experimental.feature.search_pipeline.enabled: false diff --git a/modules/search-pipeline-common/build.gradle b/modules/search-pipeline-common/build.gradle index 9e1cc19196912..cc655e10ada92 100644 --- a/modules/search-pipeline-common/build.gradle +++ b/modules/search-pipeline-common/build.gradle @@ -27,6 +27,7 @@ restResources { } testClusters.all { + setting 'opensearch.experimental.feature.search_pipeline.enabled', 'true' } thirdPartyAudit.ignoreMissingClasses( diff --git a/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java b/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java index cae6bf8168b12..2c7ffb4648a7d 100644 --- a/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java +++ b/modules/search-pipeline-common/src/internalClusterTest/java/org/opensearch/search/pipeline/common/SearchPipelineCommonIT.java @@ -19,6 +19,8 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.plugins.Plugin; @@ -32,6 +34,12 @@ @OpenSearchIntegTestCase.SuiteScopeTestCase public class SearchPipelineCommonIT extends OpenSearchIntegTestCase { + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(FeatureFlags.SEARCH_PIPELINE, "true").build(); + } + @Override protected Collection> nodePlugins() { return List.of(SearchPipelineCommonModulePlugin.class); diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 535749a5e0c14..dcc62d11568ae 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -28,6 +28,7 @@ artifacts { testClusters.all { module ':modules:mapper-extras' + setting 'opensearch.experimental.feature.search_pipeline.enabled', 'true' } test.enabled = false diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java index 8fb6cd115f24b..9cd61f78e6ec7 100644 --- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java @@ -37,7 +37,8 @@ protected FeatureFlagSettings( FeatureFlags.REPLICATION_TYPE_SETTING, FeatureFlags.REMOTE_STORE_SETTING, FeatureFlags.SEARCHABLE_SNAPSHOT_SETTING, - FeatureFlags.EXTENSIONS_SETTING + FeatureFlags.EXTENSIONS_SETTING, + FeatureFlags.SEARCH_PIPELINE_SETTING ) ) ); diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java index 72b7349180bad..6f5e75cc7257e 100644 --- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java +++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java @@ -54,6 +54,12 @@ public class FeatureFlags { */ public static final String EXTENSIONS = "opensearch.experimental.feature.extensions.enabled"; + /** + * Gates the search pipeline features during initial development. + * Once the feature is complete and ready for release, this feature flag can be removed. 
+ */ + public static final String SEARCH_PIPELINE = "opensearch.experimental.feature.search_pipeline.enabled"; + /** * Should store the settings from opensearch.yml. */ @@ -89,4 +95,6 @@ public static boolean isEnabled(String featureFlagName) { public static final Setting SEARCHABLE_SNAPSHOT_SETTING = Setting.boolSetting(SEARCHABLE_SNAPSHOT, false, Property.NodeScope); public static final Setting EXTENSIONS_SETTING = Setting.boolSetting(EXTENSIONS, false, Property.NodeScope); + + public static final Setting SEARCH_PIPELINE_SETTING = Setting.boolSetting(SEARCH_PIPELINE, false, Property.NodeScope); } diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index 27b623b791b86..5f30f7d9a0127 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -36,6 +36,7 @@ import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CollectionUtils; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; @@ -63,6 +64,7 @@ * against requests and responses. */ public class SearchPipelineService implements ClusterStateApplier, ReportingService { + public static final String SEARCH_PIPELINE_ORIGIN = "search_pipeline"; private static final Logger logger = LogManager.getLogger(SearchPipelineService.class); @@ -210,6 +212,9 @@ public void putPipeline( PutSearchPipelineRequest request, ActionListener listener ) throws Exception { + if (FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE) == false) { + throw new IllegalArgumentException("Experimental search pipeline feature is not enabled"); + } validatePipeline(searchPipelineInfos, request); clusterService.submitStateUpdateTask( @@ -325,7 +330,7 @@ static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterStat public SearchRequest transformRequest(SearchRequest originalRequest) { String pipelineId = originalRequest.pipeline(); - if (pipelineId != null) { + if (pipelineId != null && FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE)) { PipelineHolder pipeline = pipelines.get(pipelineId); if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); @@ -348,7 +353,7 @@ public SearchRequest transformRequest(SearchRequest originalRequest) { public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) { String pipelineId = request.pipeline(); - if (pipelineId != null) { + if (pipelineId != null && FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE)) { PipelineHolder pipeline = pipelines.get(pipelineId); if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java index 99c6ce972de23..1664bbc7dd159 100644 --- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -11,7 +11,9 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import 
org.apache.lucene.search.TotalHits; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.opensearch.OpenSearchParseException; import org.opensearch.ResourceNotFoundException; import org.opensearch.Version; @@ -27,9 +29,11 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.SuppressForbidden; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.TermQueryBuilder; @@ -54,7 +58,18 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +@SuppressForbidden(reason = "feature flag overrides") public class SearchPipelineServiceTests extends OpenSearchTestCase { + @BeforeClass + public static void enableFeature() { + System.setProperty(FeatureFlags.SEARCH_PIPELINE, "true"); + } + + @AfterClass + public static void disableFeature() { + System.setProperty(FeatureFlags.SEARCH_PIPELINE, "false"); + } + private static final SearchPipelinePlugin DUMMY_PLUGIN = new SearchPipelinePlugin() { @Override public Map getProcessors(Processor.Parameters parameters) { From 8e1ba278c61d4af7de1078f913aa86525c413857 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Wed, 5 Apr 2023 17:15:38 +0000 Subject: [PATCH 12/14] More feature flag fixes for search pipeline testing - Don't use system properties for SearchPipelineServiceTests. - Enable feature flag for multinode smoke tests. 
Signed-off-by: Michael Froh --- qa/smoke-test-multinode/build.gradle | 1 + .../pipeline/SearchPipelineService.java | 16 +++++++++++++--- .../pipeline/SearchPipelineServiceTests.java | 19 ++++--------------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index 25261f5e3ff7d..8f13e4bf8736e 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -43,6 +43,7 @@ File repo = file("$buildDir/testclusters/repo") testClusters.integTest { numberOfNodes = 2 setting 'path.repo', repo.absolutePath + setting 'opensearch.experimental.feature.search_pipeline.enabled', 'true' } integTest { diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index 5f30f7d9a0127..95ae2b9c81caf 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -79,6 +79,8 @@ public class SearchPipelineService implements ClusterStateApplier, ReportingServ private final NamedWriteableRegistry namedWriteableRegistry; private volatile ClusterState state; + private boolean forceEnabled = false; + public SearchPipelineService( ClusterService clusterService, ThreadPool threadPool, @@ -212,7 +214,7 @@ public void putPipeline( PutSearchPipelineRequest request, ActionListener listener ) throws Exception { - if (FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE) == false) { + if (isFeatureEnabled() == false) { throw new IllegalArgumentException("Experimental search pipeline feature is not enabled"); } @@ -330,7 +332,7 @@ static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterStat public SearchRequest transformRequest(SearchRequest originalRequest) { String pipelineId = originalRequest.pipeline(); - if (pipelineId != null && FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE)) { + if (pipelineId != null && isFeatureEnabled()) { PipelineHolder pipeline = pipelines.get(pipelineId); if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); @@ -353,7 +355,7 @@ public SearchRequest transformRequest(SearchRequest originalRequest) { public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) { String pipelineId = request.pipeline(); - if (pipelineId != null && FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE)) { + if (pipelineId != null && isFeatureEnabled()) { PipelineHolder pipeline = pipelines.get(pipelineId); if (pipeline == null) { throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined"); @@ -426,4 +428,12 @@ static class PipelineHolder { this.pipeline = Objects.requireNonNull(pipeline); } } + + private boolean isFeatureEnabled() { + return forceEnabled || FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE); + } + + void setForceEnabled(boolean forceEnabled) { + this.forceEnabled = forceEnabled; + } } diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java index 1664bbc7dd159..c0748edfb3efb 100644 --- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java +++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java @@ -11,9 +11,7 @@ import 
org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.TotalHits; -import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; import org.opensearch.OpenSearchParseException; import org.opensearch.ResourceNotFoundException; import org.opensearch.Version; @@ -29,11 +27,9 @@ import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.SuppressForbidden; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.TermQueryBuilder; @@ -58,17 +54,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -@SuppressForbidden(reason = "feature flag overrides") public class SearchPipelineServiceTests extends OpenSearchTestCase { - @BeforeClass - public static void enableFeature() { - System.setProperty(FeatureFlags.SEARCH_PIPELINE, "true"); - } - - @AfterClass - public static void disableFeature() { - System.setProperty(FeatureFlags.SEARCH_PIPELINE, "false"); - } private static final SearchPipelinePlugin DUMMY_PLUGIN = new SearchPipelinePlugin() { @Override @@ -137,6 +123,7 @@ public void testExecuteSearchPipelineDoesNotExist() { List.of(DUMMY_PLUGIN), client ); + searchPipelineService.setForceEnabled(true); final SearchRequest searchRequest = new SearchRequest("_index").pipeline("bar"); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -232,7 +219,7 @@ private SearchPipelineService createWithProcessors(Map getProcessors(Processor.Parameters paramet }), client ); + searchPipelineService.setForceEnabled(true); + return searchPipelineService; } public void testUpdatePipelines() { From f49542cacb7dff3dca8ade20c8c9af71719b3770 Mon Sep 17 00:00:00 2001 From: Michael Froh Date: Wed, 5 Apr 2023 17:53:22 +0000 Subject: [PATCH 13/14] Move feature flag into constructor parameter Thanks for the suggestion, @reta! 
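As a hypothetical, self-contained illustration of the design choice (not OpenSearch code): the caller resolves the feature flag once and injects it, so tests construct the service with true instead of mutating global flag state. In the diff below, Node does this by passing FeatureFlags.isEnabled(SEARCH_PIPELINE) into the SearchPipelineService constructor, and the gating check mirrors the one in putPipeline.

    // Hypothetical example class; the real constructor takes many more dependencies.
    final class FlagGatedService {
        private final boolean isEnabled;

        FlagGatedService(boolean isEnabled) {
            // The flag is fixed at construction time; tests simply pass true.
            this.isEnabled = isEnabled;
        }

        void putPipeline() {
            if (isEnabled == false) {
                throw new IllegalArgumentException("Experimental search pipeline feature is not enabled");
            }
            // ... feature logic would go here ...
        }
    }
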
Signed-off-by: Michael Froh --- .../main/java/org/opensearch/node/Node.java | 4 +++- .../pipeline/SearchPipelineService.java | 21 +++++++------------ .../pipeline/SearchPipelineServiceTests.java | 14 +++++++------ .../snapshots/SnapshotResiliencyTests.java | 3 ++- 4 files changed, 20 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 4488dfd8dad29..fbcbd75c1fded 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -247,6 +247,7 @@ import static java.util.stream.Collectors.toList; import static org.opensearch.common.util.FeatureFlags.REPLICATION_TYPE; +import static org.opensearch.common.util.FeatureFlags.SEARCH_PIPELINE; import static org.opensearch.env.NodeEnvironment.collectFileCacheDataPath; import static org.opensearch.index.ShardIndexingPressureSettings.SHARD_INDEXING_PRESSURE_ENABLED_ATTRIBUTE_KEY; @@ -979,7 +980,8 @@ protected Node( xContentRegistry, namedWriteableRegistry, pluginsService.filterPlugins(SearchPipelinePlugin.class), - client + client, + FeatureFlags.isEnabled(SEARCH_PIPELINE) ); this.nodeService = new NodeService( settings, diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java index 95ae2b9c81caf..a77523649ec53 100644 --- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java +++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java @@ -36,7 +36,6 @@ import org.opensearch.common.regex.Regex; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CollectionUtils; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; @@ -79,7 +78,7 @@ public class SearchPipelineService implements ClusterStateApplier, ReportingServ private final NamedWriteableRegistry namedWriteableRegistry; private volatile ClusterState state; - private boolean forceEnabled = false; + private final boolean isEnabled; public SearchPipelineService( ClusterService clusterService, @@ -90,7 +89,8 @@ public SearchPipelineService( NamedXContentRegistry namedXContentRegistry, NamedWriteableRegistry namedWriteableRegistry, List searchPipelinePlugins, - Client client + Client client, + boolean isEnabled ) { this.clusterService = clusterService; this.scriptService = scriptService; @@ -113,6 +113,7 @@ public SearchPipelineService( ); putPipelineTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.PUT_SEARCH_PIPELINE_KEY, true); deletePipelineTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.DELETE_SEARCH_PIPELINE_KEY, true); + this.isEnabled = isEnabled; } private static Map processorFactories( @@ -214,7 +215,7 @@ public void putPipeline( PutSearchPipelineRequest request, ActionListener listener ) throws Exception { - if (isFeatureEnabled() == false) { + if (isEnabled == false) { throw new IllegalArgumentException("Experimental search pipeline feature is not enabled"); } @@ -332,7 +333,7 @@ static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterStat public SearchRequest transformRequest(SearchRequest originalRequest) { String pipelineId = originalRequest.pipeline(); - if (pipelineId != null && isFeatureEnabled()) { + if (pipelineId != null 
diff --git a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java
index 95ae2b9c81caf..a77523649ec53 100644
--- a/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java
+++ b/server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java
@@ -36,7 +36,6 @@
 import org.opensearch.common.regex.Regex;
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.CollectionUtils;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.env.Environment;
@@ -79,7 +78,7 @@ public class SearchPipelineService implements ClusterStateApplier, ReportingServ
     private final NamedWriteableRegistry namedWriteableRegistry;
     private volatile ClusterState state;
 
-    private boolean forceEnabled = false;
+    private final boolean isEnabled;
 
     public SearchPipelineService(
         ClusterService clusterService,
@@ -90,7 +89,8 @@ public SearchPipelineService(
         NamedXContentRegistry namedXContentRegistry,
         NamedWriteableRegistry namedWriteableRegistry,
         List searchPipelinePlugins,
-        Client client
+        Client client,
+        boolean isEnabled
     ) {
         this.clusterService = clusterService;
         this.scriptService = scriptService;
@@ -113,6 +113,7 @@ public SearchPipelineService(
         );
         putPipelineTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.PUT_SEARCH_PIPELINE_KEY, true);
         deletePipelineTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.DELETE_SEARCH_PIPELINE_KEY, true);
+        this.isEnabled = isEnabled;
     }
 
     private static Map processorFactories(
@@ -214,7 +215,7 @@ public void putPipeline(
         PutSearchPipelineRequest request,
         ActionListener listener
     ) throws Exception {
-        if (isFeatureEnabled() == false) {
+        if (isEnabled == false) {
             throw new IllegalArgumentException("Experimental search pipeline feature is not enabled");
         }
@@ -332,7 +333,7 @@ static ClusterState innerDelete(DeleteSearchPipelineRequest request, ClusterStat
     public SearchRequest transformRequest(SearchRequest originalRequest) {
         String pipelineId = originalRequest.pipeline();
-        if (pipelineId != null && isFeatureEnabled()) {
+        if (pipelineId != null && isEnabled) {
             PipelineHolder pipeline = pipelines.get(pipelineId);
             if (pipeline == null) {
                 throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined");
@@ -355,7 +356,7 @@ public SearchRequest transformRequest(SearchRequest originalRequest) {
     public SearchResponse transformResponse(SearchRequest request, SearchResponse searchResponse) {
         String pipelineId = request.pipeline();
-        if (pipelineId != null && isFeatureEnabled()) {
+        if (pipelineId != null && isEnabled) {
             PipelineHolder pipeline = pipelines.get(pipelineId);
             if (pipeline == null) {
                 throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined");
@@ -428,12 +429,4 @@ static class PipelineHolder {
             this.pipeline = Objects.requireNonNull(pipeline);
         }
     }
-
-    private boolean isFeatureEnabled() {
-        return forceEnabled || FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE);
-    }
-
-    void setForceEnabled(boolean forceEnabled) {
-        this.forceEnabled = forceEnabled;
-    }
 }
diff --git a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java
index c0748edfb3efb..239ec79b91082 100644
--- a/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java
+++ b/server/src/test/java/org/opensearch/search/pipeline/SearchPipelineServiceTests.java
@@ -84,7 +84,8 @@ public void testSearchPipelinePlugin() {
             this.xContentRegistry(),
             this.writableRegistry(),
             List.of(DUMMY_PLUGIN),
-            client
+            client,
+            false
         );
         Map factories = searchPipelineService.getProcessorFactories();
         assertEquals(1, factories.size());
@@ -104,7 +105,8 @@ public void testSearchPipelinePluginDuplicate() {
                 this.xContentRegistry(),
                 this.writableRegistry(),
                 List.of(DUMMY_PLUGIN, DUMMY_PLUGIN),
-                client
+                client,
+                false
             )
         );
         assertTrue(e.getMessage(), e.getMessage().contains(" already registered"));
@@ -121,9 +123,9 @@ public void testExecuteSearchPipelineDoesNotExist() {
             this.xContentRegistry(),
             this.writableRegistry(),
             List.of(DUMMY_PLUGIN),
-            client
+            client,
+            true
         );
-        searchPipelineService.setForceEnabled(true);
         final SearchRequest searchRequest = new SearchRequest("_index").pipeline("bar");
         IllegalArgumentException e = expectThrows(
             IllegalArgumentException.class,
@@ -233,9 +235,9 @@ public Map getProcessors(Processor.Parameters paramet
                     return processors;
                 }
             }),
-            client
+            client,
+            true
         );
-        searchPipelineService.setForceEnabled(true);
         return searchPipelineService;
     }
 
diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java
index a1181f64e060d..f218895754b7f 100644
--- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java
+++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java
@@ -2101,7 +2101,8 @@ public void onFailure(final Exception e) {
                 namedXContentRegistry,
                 namedWriteableRegistry,
                 List.of(),
-                client
+                client,
+                false
             )
         )
     );
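Taken together, the SearchPipelineService changes in this commit mean that write operations (putPipeline) fail loudly when the experimental feature is off, while the read path (transformRequest/transformResponse) only applies a pipeline when one is requested and the flag is on; tests exercise both branches simply by passing true or false to the constructor. Here is a minimal, self-contained sketch of that guard behavior; PipelineRegistry is a hypothetical stand-in, not the OpenSearch class.

    // Hypothetical sketch of the enabled/disabled guard behavior described above.
    import java.util.HashMap;
    import java.util.Map;

    public final class PipelineRegistry {
        private final boolean isEnabled;
        private final Map<String, String> pipelines = new HashMap<>();

        public PipelineRegistry(boolean isEnabled) {
            this.isEnabled = isEnabled;
        }

        public void putPipeline(String id, String definition) {
            if (isEnabled == false) { // writes are rejected outright while the feature is disabled
                throw new IllegalArgumentException("Experimental search pipeline feature is not enabled");
            }
            pipelines.put(id, definition);
        }

        public String transformRequest(String pipelineId, String request) {
            if (pipelineId != null && isEnabled) {
                String pipeline = pipelines.get(pipelineId);
                if (pipeline == null) {
                    throw new IllegalArgumentException("Pipeline " + pipelineId + " is not defined");
                }
                return request + " via " + pipeline;
            }
            return request; // no pipeline requested, or feature disabled: pass through unchanged
        }

        public static void main(String[] args) {
            PipelineRegistry disabled = new PipelineRegistry(false);
            System.out.println(disabled.transformRequest("p1", "q")); // "q": request passes through

            PipelineRegistry enabled = new PipelineRegistry(true);
            enabled.putPipeline("p1", "my_pipeline");
            System.out.println(enabled.transformRequest("p1", "q")); // "q via my_pipeline"
        }
    }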
From 402e8b666e8ef792faed9ad7305301a6b469b5c0 Mon Sep 17 00:00:00 2001
From: Michael Froh
Date: Wed, 5 Apr 2023 18:45:57 +0000
Subject: [PATCH 14/14] Move REST handlers behind feature flag

Signed-off-by: Michael Froh
---
 .../src/main/java/org/opensearch/action/ActionModule.java | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/server/src/main/java/org/opensearch/action/ActionModule.java b/server/src/main/java/org/opensearch/action/ActionModule.java
index 8812bdc4e3186..7346ccc2e3829 100644
--- a/server/src/main/java/org/opensearch/action/ActionModule.java
+++ b/server/src/main/java/org/opensearch/action/ActionModule.java
@@ -931,9 +931,11 @@ public void initRestHandlers(Supplier nodesInCluster) {
         registerHandler.accept(new RestDeleteDecommissionStateAction());
 
         // Search pipelines API
-        registerHandler.accept(new RestPutSearchPipelineAction());
-        registerHandler.accept(new RestGetSearchPipelineAction());
-        registerHandler.accept(new RestDeleteSearchPipelineAction());
+        if (FeatureFlags.isEnabled(FeatureFlags.SEARCH_PIPELINE)) {
+            registerHandler.accept(new RestPutSearchPipelineAction());
+            registerHandler.accept(new RestGetSearchPipelineAction());
+            registerHandler.accept(new RestDeleteSearchPipelineAction());
+        }
 
         for (ActionPlugin plugin : actionPlugins) {
             for (RestHandler handler : plugin.getRestHandlers(