diff --git a/build.gradle b/build.gradle
index e72c10f9376c0..4fe69fb0acd8e 100644
--- a/build.gradle
+++ b/build.gradle
@@ -480,25 +480,17 @@ task run(type: Run) {
   impliesSubProjects = true
 }
 
-task wrapper(type: Wrapper)
-
-gradle.projectsEvaluated {
-
-  allprojects {
-    tasks.withType(Wrapper) { Wrapper wrapper ->
-      wrapper.distributionType = DistributionType.ALL
-
-      wrapper.doLast {
+wrapper {
+  distributionType = DistributionType.ALL
+  doLast {
     final DistributionLocator locator = new DistributionLocator()
     final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
     final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
     final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
     final String sha256Sum = new String(sha256Uri.toURL().bytes)
     wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
-      }
+    println "Added checksum to wrapper properties"
   }
-  }
-}
 
 static void assertLinesInFile(final Path path, final List<String> expectedLines) {
@@ -585,7 +577,7 @@ if (System.properties.get("build.compare") != null) {
     }
   }
   sourceBuild {
-    gradleVersion = "4.7" // does not default to gradle weapper of project dir, but current version
+    gradleVersion = "4.8.1" // does not default to the Gradle wrapper of the project dir, but to the current version
     projectDir = referenceProject
     tasks = ["clean", "assemble"]
     arguments = ["-Dbuild.compare_friendly=true"]
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index eb94420b43d1e..0123abd7ce104 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -471,6 +471,24 @@ class BuildPlugin implements Plugin<Project> {
 
     /** Configuration generation of maven poms. */
     public static void configurePomGeneration(Project project) {
+        // Only works with `enableFeaturePreview('STABLE_PUBLISHING')`
+        // https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
+        project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
+            // The GenerateMavenPom task is aggressive about setting the destination; instead of fighting it,
+            // just make a copy.
+            doLast {
+                project.copy {
+                    from generatePOMTask.destination
+                    into "${project.buildDir}/distributions"
+                    rename { "${project.archivesBaseName}-${project.version}.pom" }
+                }
+            }
+            // build poms with assemble (if the assemble task exists)
+            Task assemble = project.tasks.findByName('assemble')
+            if (assemble) {
+                assemble.dependsOn(generatePOMTask)
+            }
+        }
         project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded {
             project.publishing {
                 publications {
@@ -480,20 +498,6 @@ class BuildPlugin implements Plugin<Project> {
                 }
             }
         }
-
-        // Work around Gradle 4.8 issue until we `enableFeaturePreview('STABLE_PUBLISHING')`
-        // https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
-        project.getGradle().getTaskGraph().whenReady {
-            project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom t ->
-                // place the pom next to the jar it is for
-                t.destination = new File(project.buildDir, "distributions/${project.archivesBaseName}-${project.version}.pom")
-                // build poms with assemble (if the assemble task exists)
-                Task assemble = project.tasks.findByName('assemble')
-                if (assemble) {
-                    assemble.dependsOn(t)
-                }
-            }
-        }
     }
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
index 28008f4313c97..16dde0dde6a85 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -157,16 +157,18 @@ public class PluginBuildPlugin extends BuildPlugin {
     /** Adds a task to move jar and associated files to a "-client" name. */
     protected static void addClientJarTask(Project project) {
         Task clientJar = project.tasks.create('clientJar')
-        clientJar.dependsOn(project.jar, 'generatePomFileForClientJarPublication', project.javadocJar, project.sourcesJar)
+        clientJar.dependsOn(project.jar, project.tasks.generatePomFileForClientJarPublication, project.javadocJar, project.sourcesJar)
         clientJar.doFirst {
             Path jarFile = project.jar.outputs.files.singleFile.toPath()
             String clientFileName = jarFile.fileName.toString().replace(project.version, "client-${project.version}")
             Files.copy(jarFile, jarFile.resolveSibling(clientFileName), StandardCopyOption.REPLACE_EXISTING)
 
-            String pomFileName = jarFile.fileName.toString().replace('.jar', '.pom')
             String clientPomFileName = clientFileName.replace('.jar', '.pom')
-            Files.copy(jarFile.resolveSibling(pomFileName), jarFile.resolveSibling(clientPomFileName),
-                    StandardCopyOption.REPLACE_EXISTING)
+            Files.copy(
+                    project.tasks.generatePomFileForClientJarPublication.outputs.files.singleFile.toPath(),
+                    jarFile.resolveSibling(clientPomFileName),
+                    StandardCopyOption.REPLACE_EXISTING
+            )
 
             String sourcesFileName = jarFile.fileName.toString().replace('.jar', '-sources.jar')
             String clientSourcesFileName = clientFileName.replace('.jar', '-sources.jar')
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy
index c48dc890ab080..390821c80ff39 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy
@@ -29,6 +29,7 @@ import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.Task
 import org.gradle.api.plugins.JavaBasePlugin
+import org.gradle.api.tasks.compile.JavaCompile
 
 /**
  * Configures the build to compile tests against Elasticsearch's test framework
@@ -61,5 +62,12 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
 
         PrecommitTasks.create(project, false)
         project.check.dependsOn(project.precommit)
+
+        project.tasks.withType(JavaCompile) {
+            // This will be the default in Gradle 5.0
+            if (options.compilerArgs.contains("-processor") == false) {
+                options.compilerArgs << '-proc:none'
+            }
+        }
     }
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
index 89cabd999f056..6bfe835bf39aa 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
@@ -526,7 +526,11 @@ class VagrantTestPlugin implements Plugin<Project> {
                 project.gradle.removeListener(batsPackagingReproListener)
             }
             if (project.extensions.esvagrant.boxes.contains(box)) {
-                packagingTest.dependsOn(batsPackagingTest)
+                // these tests are temporarily disabled for suse boxes while we debug an issue
+                // https://github.com/elastic/elasticsearch/issues/30295
+                if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) {
+                    packagingTest.dependsOn(batsPackagingTest)
+                }
             }
         }
diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle
index 9ca53292a4956..8a488d197387c 100644
--- a/client/benchmark/build.gradle
+++ b/client/benchmark/build.gradle
@@ -24,7 +24,7 @@ buildscript {
     }
   }
   dependencies {
-    classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2'
+    classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
   }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java
index 1e25a40b0084a..2091992140cf2 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java
@@ -23,6 +23,8 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.rest.RestStatus;
@@ -100,6 +102,35 @@ public void putSettingsAsync(ClusterUpdateSettingsRequest clusterUpdateSettingsR
             ClusterUpdateSettingsResponse::fromXContent, listener, emptySet(), headers);
     }
 
+    /**
+     * Get the cluster wide settings using the Cluster Get Settings API.
+     * See Cluster Get Settings
+     * API on elastic.co
+     * @param clusterGetSettingsRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public ClusterGetSettingsResponse getSettings(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options)
+            throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(clusterGetSettingsRequest, RequestConverters::clusterGetSettings,
+            options, ClusterGetSettingsResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously get the cluster wide settings using the Cluster Get Settings API.
+     * See Cluster Get Settings
+     * API on elastic.co
+     * @param clusterGetSettingsRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void getSettingsAsync(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options,
+                                 ActionListener<ClusterGetSettingsResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(clusterGetSettingsRequest, RequestConverters::clusterGetSettings,
+            options, ClusterGetSettingsResponse::fromXContent, listener, emptySet());
+    }
+
     /**
      * Get cluster health using the Cluster Health API.
     * See
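Reviewer note, not part of the patch: a minimal usage sketch of the two methods added above, assuming an already-configured `RestHighLevelClient` named `client` (all other names are illustrative):

```java
import java.io.IOException;
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.settings.Settings;

class ClusterGetSettingsUsage {
    static void printClusterSettings(RestHighLevelClient client) throws IOException {
        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
        ClusterGetSettingsResponse response = client.cluster().getSettings(request, RequestOptions.DEFAULT);
        // Persistent and transient settings come back as separate Settings objects.
        Settings persistent = response.getPersistentSettings();
        Settings transients = response.getTransientSettings();
        System.out.println(persistent + " / " + transients);
    }
}
```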
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
index 5c51aa17eec47..be2cb89162a9b 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java
@@ -24,6 +24,8 @@
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -1182,4 +1184,32 @@ public void getTemplateAsync(GetIndexTemplatesRequest getIndexTemplatesRequest,
         restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
             options, GetIndexTemplatesResponse::fromXContent, listener, emptySet());
     }
+
+    /**
+     * Calls the analyze API
+     *
+     * See Analyze API on elastic.co
+     *
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     */
+    public AnalyzeResponse analyze(AnalyzeRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::analyze, options,
+            AnalyzeResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously calls the analyze API
+     *
+     * See Analyze API on elastic.co
+     *
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void analyzeAsync(AnalyzeRequest request, RequestOptions options,
+                             ActionListener<AnalyzeResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::analyze, options,
+            AnalyzeResponse::fromXContent, listener, emptySet());
+    }
 }
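Likewise, a usage sketch for the new analyze methods (illustrative only; `client` is an assumed `RestHighLevelClient`):

```java
import java.io.IOException;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;

class AnalyzeUsage {
    static void printTokens(RestHighLevelClient client) throws IOException {
        AnalyzeRequest request = new AnalyzeRequest()
                .text("Some text to analyze")
                .analyzer("english");
        AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
        // Each token carries its term plus offsets and position.
        for (AnalyzeResponse.AnalyzeToken token : response.getTokens()) {
            System.out.println(token.getTerm() + " @ " + token.getStartOffset() + "-" + token.getEndOffset());
        }
    }
}
```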
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 7e4520c2d0a10..dbf5851e39507 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -36,6 +36,7 @@
 import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
@@ -43,6 +44,7 @@
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -613,7 +615,7 @@ static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throw
         request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
-    
+
     static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template");
 
@@ -627,7 +629,7 @@ static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplat
         byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent);
         request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type())));
         return request;
-    }    
+    }
 
     static Request existsAlias(GetAliasesRequest getAliasesRequest) {
         if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
@@ -719,6 +721,17 @@ static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSett
         return request;
     }
 
+    static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException {
+        Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings");
+
+        Params parameters = new Params(request);
+        parameters.withLocal(clusterGetSettingsRequest.local());
+        parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults());
+        parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout());
+
+        return request;
+    }
+
     static Request getPipeline(GetPipelineRequest getPipelineRequest) {
         String endpoint = new EndpointBuilder()
             .addPathPartAsIs("_ingest/pipeline")
@@ -997,6 +1010,18 @@ static Request getAlias(GetAliasesRequest getAliasesRequest) {
         return request;
     }
 
+    static Request analyze(AnalyzeRequest request) throws IOException {
+        EndpointBuilder builder = new EndpointBuilder();
+        String index = request.index();
+        if (index != null) {
+            builder.addPathPart(index);
+        }
+        builder.addPathPartAsIs("_analyze");
+        Request req = new Request(HttpGet.METHOD_NAME, builder.build());
+        req.setEntity(createEntity(request, REQUEST_BODY_CONTENT_TYPE));
+        return req;
+    }
+
     static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
         String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
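For reference, a test-style sketch of the request shapes the two new converters are expected to produce, mirroring the assertions in the converter tests further down in this change (it would have to live in the `org.elasticsearch.client` package, since `RequestConverters` is package-private):

```java
public void testNewConverterEndpoints() throws IOException {
    // GET /_cluster/settings; local/include_defaults/master_timeout become query params
    Request settings = RequestConverters.clusterGetSettings(new ClusterGetSettingsRequest());
    assertEquals("/_cluster/settings", settings.getEndpoint());

    // GET /{index}/_analyze when an index is set, GET /_analyze otherwise; body carries the request
    Request analyze = RequestConverters.analyze(new AnalyzeRequest().index("test").text("text").analyzer("standard"));
    assertEquals("/test/_analyze", analyze.getEndpoint());
}
```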
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
index 8e31d32ac7b3d..b0b23ed9fdd46 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
@@ -22,6 +22,8 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
@@ -42,6 +44,7 @@
 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -115,6 +118,46 @@ public void testClusterUpdateSettingNonExistent() {
             "Elasticsearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]"));
     }
 
+    public void testClusterGetSettings() throws IOException {
+        final String transientSettingKey = RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey();
+        final int transientSettingValue = 10;
+
+        final String persistentSettingKey = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
+        final String persistentSettingValue = EnableAllocationDecider.Allocation.NONE.name();
+
+        Settings transientSettings =
+            Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
+        Settings persistentSettings = Settings.builder().put(persistentSettingKey, persistentSettingValue).build();
+        clusterUpdateSettings(persistentSettings, transientSettings);
+
+        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
+        ClusterGetSettingsResponse response = execute(
+            request, highLevelClient().cluster()::getSettings, highLevelClient().cluster()::getSettingsAsync);
+        assertEquals(persistentSettings, response.getPersistentSettings());
+        assertEquals(transientSettings, response.getTransientSettings());
+        assertEquals(0, response.getDefaultSettings().size());
+    }
+
+    public void testClusterGetSettingsWithDefault() throws IOException {
+        final String transientSettingKey = RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey();
+        final int transientSettingValue = 10;
+
+        final String persistentSettingKey = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
+        final String persistentSettingValue = EnableAllocationDecider.Allocation.NONE.name();
+
+        Settings transientSettings =
+            Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
+        Settings persistentSettings = Settings.builder().put(persistentSettingKey, persistentSettingValue).build();
+        clusterUpdateSettings(persistentSettings, transientSettings);
+
+        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest().includeDefaults(true);
+        ClusterGetSettingsResponse response = execute(
+            request, highLevelClient().cluster()::getSettings, highLevelClient().cluster()::getSettingsAsync);
+        assertEquals(persistentSettings, response.getPersistentSettings());
+        assertEquals(transientSettings, response.getTransientSettings());
+        assertThat(response.getDefaultSettings().size(), greaterThan(0));
+    }
+
     public void testClusterHealthGreen() throws IOException {
         ClusterHealthRequest request = new ClusterHealthRequest();
         request.timeout("5s");
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
index d9d57a49b4f8a..5f2b191a66024 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
@@ -21,9 +21,11 @@
 import org.apache.http.Header;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.ingest.PutPipelineRequest;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.ingest.Pipeline;
@@ -164,4 +166,12 @@ protected static void createPipeline(String pipelineId) throws IOException {
     protected static void createPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
         assertOK(client().performRequest(RequestConverters.putPipeline(putPipelineRequest)));
     }
+
+    protected static void clusterUpdateSettings(Settings persistentSettings,
+                                                Settings transientSettings) throws IOException {
+        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
+        request.persistentSettings(persistentSettings);
+        request.transientSettings(transientSettings);
+        assertOK(client().performRequest(RequestConverters.clusterPutSettings(request)));
+    }
 }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java
index 5f3354ad2b95d..c5bc74e7517c3 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java
@@ -59,7 +59,7 @@ private static Map<String, List<AliasMetaData>> createIndicesAliasesMap(int min,
         return map;
     }
 
-    private static AliasMetaData createAliasMetaData() {
+    public static AliasMetaData createAliasMetaData() {
         AliasMetaData.Builder builder = AliasMetaData.builder(randomAlphaOfLengthBetween(3, 10));
         if (randomBoolean()) {
             builder.routing(randomAlphaOfLengthBetween(3, 10));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
index ba910f91dc855..f94f8776ff1a2 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
@@ -29,6 +29,8 @@
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -1320,4 +1322,20 @@ public void testGetIndexTemplate() throws Exception {
             new GetIndexTemplatesRequest().names("the-template-*"), client.indices()::getTemplate, client.indices()::getTemplateAsync));
         assertThat(notFound.status(), equalTo(RestStatus.NOT_FOUND));
     }
+
+    public void testAnalyze() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english");
+        AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync);
+
+        assertThat(noindexResponse.getTokens(), hasSize(3));
+
+        AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true);
+        AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync);
+
+        assertNotNull(detailsResponse.detail());
+    }
 }
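A sketch of the behavior the test above exercises, for readers skimming the diff (fragment; `client` is an assumed `RestHighLevelClient`): with `explain(true)`, the token details come back via `detail()` rather than `getTokens()`.

```java
AnalyzeRequest request = new AnalyzeRequest()
        .text("One two three")
        .analyzer("english")
        .explain(true);                                    // request per-step token details
AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
DetailAnalyzeResponse detail = response.detail();          // non-null only when explain=true
```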
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 33ba303187e2a..fc34fafc212d4 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -36,6 +36,7 @@
 import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
@@ -45,6 +46,7 @@
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -1402,42 +1404,42 @@ public void testRenderSearchTemplate() throws Exception {
         assertEquals(Collections.emptyMap(), request.getParameters());
         assertToXContentBody(searchTemplateRequest, request.getEntity());
     }
-    
+
     public void testMultiSearchTemplate() throws Exception {
         final int numSearchRequests = randomIntBetween(1, 10);
         MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
-        
+
         for (int i = 0; i < numSearchRequests; i++) {
             // Create a random request.
             String[] indices = randomIndicesNames(0, 5);
             SearchRequest searchRequest = new SearchRequest(indices);
-            
+
             Map<String, String> expectedParams = new HashMap<>();
             setRandomSearchParams(searchRequest, expectedParams);
-            
+
             // scroll is not supported in the current msearch or msearchtemplate api, so unset it:
             searchRequest.scroll((Scroll) null);
             // batched reduce size is currently not set-able on a per-request basis as it is a query string parameter only
             searchRequest.setBatchedReduceSize(SearchRequest.DEFAULT_BATCHED_REDUCE_SIZE);
-            
+
             setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
-            
+
             SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest);
-            
+
             searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
             searchTemplateRequest.setScriptType(ScriptType.INLINE);
             searchTemplateRequest.setProfile(randomBoolean());
-            
+
             Map<String, Object> scriptParams = new HashMap<>();
             scriptParams.put("field", "name");
             scriptParams.put("value", randomAlphaOfLengthBetween(2, 5));
             searchTemplateRequest.setScriptParams(scriptParams);
-            
-            multiSearchTemplateRequest.add(searchTemplateRequest);            
+
+            multiSearchTemplateRequest.add(searchTemplateRequest);
         }
 
         Request multiRequest = RequestConverters.multiSearchTemplate(multiSearchTemplateRequest);
-        
+
         assertEquals(HttpPost.METHOD_NAME, multiRequest.getMethod());
         assertEquals("/_msearch/template", multiRequest.getEndpoint());
         List<SearchTemplateRequest> searchRequests = multiSearchTemplateRequest.requests();
@@ -1446,9 +1448,9 @@ public void testMultiSearchTemplate() throws Exception {
         HttpEntity actualEntity = multiRequest.getEntity();
         byte[] expectedBytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent());
         assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
-        assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));        
+        assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));
     }
-    
+
     public void testExistsAlias() {
         GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
         String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
@@ -1667,6 +1669,21 @@ public void testClusterPutSettings() throws IOException {
         assertEquals(expectedParams, expectedRequest.getParameters());
     }
 
+    public void testClusterGetSettings() throws IOException {
+        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
+        Map<String, String> expectedParams = new HashMap<>();
+        setRandomMasterTimeout(request, expectedParams);
+        request.includeDefaults(randomBoolean());
+        if (request.includeDefaults()) {
+            expectedParams.put("include_defaults", String.valueOf(true));
+        }
+
+        Request expectedRequest = RequestConverters.clusterGetSettings(request);
+        assertEquals("/_cluster/settings", expectedRequest.getEndpoint());
+        assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod());
+        assertEquals(expectedParams, expectedRequest.getParameters());
+    }
+
     public void testPutPipeline() throws IOException {
         String pipelineId = "some_pipeline_id";
         PutPipelineRequest request = new PutPipelineRequest(
@@ -2203,6 +2220,22 @@ public void testGetTemplateRequest() throws Exception {
         assertThat(request.getEntity(), nullValue());
     }
 
+    public void testAnalyzeRequest() throws Exception {
+        AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
+            .text("Here is some text")
+            .index("test_index")
+            .analyzer("test_analyzer");
+
+        Request request = RequestConverters.analyze(indexAnalyzeRequest);
+        assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
+        assertToXContentBody(indexAnalyzeRequest, request.getEntity());
+
+        AnalyzeRequest analyzeRequest = new AnalyzeRequest()
+            .text("more text")
+            .analyzer("test_analyzer");
+        assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
+    }
+
     public void testGetScriptRequest() {
         GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script");
         Map<String, String> expectedParams = new HashMap<>();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
index 84a124f764b38..dedd50096f826 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
@@ -23,6 +23,8 @@
 import org.elasticsearch.action.LatchedActionListener;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
+import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -49,6 +51,7 @@
 import java.util.concurrent.TimeUnit;
 
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.notNullValue;
 
 /**
@@ -189,6 +192,71 @@ public void onFailure(Exception e) {
         }
     }
 
+    public void testClusterGetSettings() throws IOException {
+        RestHighLevelClient client = highLevelClient();
+
+        // tag::get-settings-request
+        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
+        // end::get-settings-request
+
+        // tag::get-settings-request-includeDefaults
+        request.includeDefaults(true); // <1>
+        // end::get-settings-request-includeDefaults
+
+        // tag::get-settings-request-local
+        request.local(true); // <1>
+        // end::get-settings-request-local
+
+        // tag::get-settings-request-masterTimeout
+        request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+        request.masterNodeTimeout("1m"); // <2>
+        // end::get-settings-request-masterTimeout
+
+        // tag::get-settings-execute
+        ClusterGetSettingsResponse response = client.cluster().getSettings(request, RequestOptions.DEFAULT); // <1>
+        // end::get-settings-execute
+
+        // tag::get-settings-response
+        Settings persistentSettings = response.getPersistentSettings(); // <1>
+        Settings transientSettings = response.getTransientSettings(); // <2>
+        Settings defaultSettings = response.getDefaultSettings(); // <3>
+        String settingValue = response.getSetting("cluster.routing.allocation.enable"); // <4>
+        // end::get-settings-response
+
+        assertThat(defaultSettings.size(), greaterThan(0));
+    }
+
+    public void testClusterGetSettingsAsync() throws InterruptedException {
+        RestHighLevelClient client = highLevelClient();
+
+        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
+
+        // tag::get-settings-execute-listener
+        ActionListener<ClusterGetSettingsResponse> listener =
+            new ActionListener<ClusterGetSettingsResponse>() {
+                @Override
+                public void onResponse(ClusterGetSettingsResponse response) {
+                    // <1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+        // end::get-settings-execute-listener
+
+        // Replace the empty listener by a blocking listener in test
+        final CountDownLatch latch = new CountDownLatch(1);
+        listener = new LatchedActionListener<>(listener, latch);
+
+        // tag::get-settings-execute-async
+        client.cluster().getSettingsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+        // end::get-settings-execute-async
+
+        assertTrue(latch.await(30L, TimeUnit.SECONDS));
+    }
+
     public void testClusterHealth() throws IOException {
         RestHighLevelClient client = highLevelClient();
         client.indices().create(new CreateIndexRequest("index"), RequestOptions.DEFAULT);
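The anonymous-listener pattern in the test above can also be adapted to a `CompletableFuture` when callers prefer futures. A sketch, not part of the change, using the existing `ActionListener.wrap` helper from `org.elasticsearch.action`:

```java
CompletableFuture<ClusterGetSettingsResponse> future = new CompletableFuture<>();
client.cluster().getSettingsAsync(new ClusterGetSettingsRequest(), RequestOptions.DEFAULT,
        ActionListener.wrap(future::complete, future::completeExceptionally));
ClusterGetSettingsResponse response = future.get(30, TimeUnit.SECONDS); // blocks the caller
```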
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
index d5bc5f96395a4..4fbee55c104c5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java
@@ -27,6 +27,9 @@
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
+import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
+import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -2315,4 +2318,127 @@ public void onFailure(Exception e) {
 
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
+
+    public void testAnalyze() throws IOException, InterruptedException {
+
+        RestHighLevelClient client = highLevelClient();
+
+        {
+            // tag::analyze-builtin-request
+            AnalyzeRequest request = new AnalyzeRequest();
+            request.text("Some text to analyze", "Some more text to analyze"); // <1>
+            request.analyzer("english"); // <2>
+            // end::analyze-builtin-request
+        }
+
+        {
+            // tag::analyze-custom-request
+            AnalyzeRequest request = new AnalyzeRequest();
+            request.text("Some text to analyze");
+            request.addCharFilter("html_strip"); // <1>
+            request.tokenizer("standard"); // <2>
+            request.addTokenFilter("lowercase"); // <3>
+
+            Map<String, Object> stopFilter = new HashMap<>();
+            stopFilter.put("type", "stop");
+            stopFilter.put("stopwords", new String[]{ "to" }); // <4>
+            request.addTokenFilter(stopFilter); // <5>
+            // end::analyze-custom-request
+        }
+
+        {
+            // tag::analyze-custom-normalizer-request
+            AnalyzeRequest request = new AnalyzeRequest();
+            request.text("BaR");
+            request.addTokenFilter("lowercase");
+            // end::analyze-custom-normalizer-request
+
+            // tag::analyze-request-explain
+            request.explain(true); // <1>
+            request.attributes("keyword", "type"); // <2>
+            // end::analyze-request-explain
+
+            // tag::analyze-request-sync
+            AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
+            // end::analyze-request-sync
+
+            // tag::analyze-response-tokens
+            List<AnalyzeResponse.AnalyzeToken> tokens = response.getTokens(); // <1>
+            // end::analyze-response-tokens
+            // tag::analyze-response-detail
+            DetailAnalyzeResponse detail = response.detail(); // <1>
+            // end::analyze-response-detail
+
+            assertNull(tokens);
+            assertNotNull(detail.tokenizer());
+        }
+
+        CreateIndexRequest req = new CreateIndexRequest("my_index");
+        CreateIndexResponse resp = client.indices().create(req, RequestOptions.DEFAULT);
+        assertTrue(resp.isAcknowledged());
+
+        PutMappingRequest pmReq = new PutMappingRequest()
+            .indices("my_index")
+            .type("_doc")
+            .source("my_field", "type=text,analyzer=english");
+        PutMappingResponse pmResp = client.indices().putMapping(pmReq, RequestOptions.DEFAULT);
+        assertTrue(pmResp.isAcknowledged());
+
+        {
+            // tag::analyze-index-request
+            AnalyzeRequest request = new AnalyzeRequest();
+            request.index("my_index"); // <1>
+            request.analyzer("my_analyzer"); // <2>
+            request.text("some text to analyze");
+            // end::analyze-index-request
+
+            // tag::analyze-execute-listener
+            ActionListener<AnalyzeResponse> listener = new ActionListener<AnalyzeResponse>() {
+                @Override
+                public void onResponse(AnalyzeResponse analyzeTokens) {
+
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+
+                }
+            };
+            // end::analyze-execute-listener
+
+            // use a built-in analyzer in the test
+            request = new AnalyzeRequest();
+            request.index("my_index");
+            request.field("my_field");
+            request.text("some text to analyze");
+            // Use a blocking listener in the test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::analyze-request-async
+            client.indices().analyzeAsync(request, RequestOptions.DEFAULT, listener);
+            // end::analyze-request-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+
+        {
+            // tag::analyze-index-normalizer-request
+            AnalyzeRequest request = new AnalyzeRequest();
+            request.index("my_index"); // <1>
+            request.normalizer("my_normalizer"); // <2>
+            request.text("some text to analyze");
+            // end::analyze-index-normalizer-request
+        }
+
+        {
+            // tag::analyze-field-request
+            AnalyzeRequest request = new AnalyzeRequest();
+            request.index("my_index");
+            request.field("my_field");
+            request.text("some text to analyze");
+            // end::analyze-field-request
+        }
+
+    }
 }
diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc
index b23a683b05610..5b68fa7be451f 100644
--- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc
+++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc
@@ -13,8 +13,8 @@ Here is an example on how to create the aggregation request:
 --------------------------------------------------
 ScriptedMetricAggregationBuilder aggregation = AggregationBuilders
     .scriptedMetric("agg")
-    .initScript(new Script("params._agg.heights = []"))
-    .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"));
+    .initScript(new Script("state.heights = []"))
+    .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"));
 --------------------------------------------------
 
 You can also specify a `combine` script which will be executed on each shard:
@@ -23,9 +23,9 @@ You can also specify a `combine` script which will be executed on each shard:
 --------------------------------------------------
 ScriptedMetricAggregationBuilder aggregation = AggregationBuilders
     .scriptedMetric("agg")
-    .initScript(new Script("params._agg.heights = []"))
-    .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
-    .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum"));
+    .initScript(new Script("state.heights = []"))
+    .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
+    .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum"));
 --------------------------------------------------
 
 You can also specify a `reduce` script which will be executed on the node which gets the request:
@@ -34,10 +34,10 @@ You can also specify a `reduce` script which will be executed on the node which
 --------------------------------------------------
 ScriptedMetricAggregationBuilder aggregation = AggregationBuilders
     .scriptedMetric("agg")
-    .initScript(new Script("params._agg.heights = []"))
-    .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
-    .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum"))
-    .reduceScript(new Script("double heights_sum = 0.0; for (a in params._aggs) { heights_sum += a } return heights_sum"));
+    .initScript(new Script("state.heights = []"))
+    .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)"))
+    .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum"))
+    .reduceScript(new Script("double heights_sum = 0.0; for (a in states) { heights_sum += a } return heights_sum"));
 --------------------------------------------------
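For completeness, a sketch of reading the result back, in the same TransportClient style as the page being edited above (index and aggregation names are the ones used in that example; `client` is assumed):

```java
SearchResponse response = client.prepareSearch("people")
        .addAggregation(aggregation)
        .get();
// The scripted_metric result is whatever the reduce script returned.
ScriptedMetric agg = response.getAggregations().get("agg");
Object result = agg.aggregation();
```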
diff --git a/docs/java-rest/high-level/cluster/get_settings.asciidoc b/docs/java-rest/high-level/cluster/get_settings.asciidoc
new file mode 100644
index 0000000000000..999bd92d79117
--- /dev/null
+++ b/docs/java-rest/high-level/cluster/get_settings.asciidoc
@@ -0,0 +1,92 @@
+[[java-rest-high-cluster-get-settings]]
+=== Cluster Get Settings API
+
+The Cluster Get Settings API allows retrieving the cluster-wide settings.
+
+[[java-rest-high-cluster-get-settings-request]]
+==== Cluster Get Settings Request
+
+A `ClusterGetSettingsRequest`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request]
+--------------------------------------------------
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-includeDefaults]
+--------------------------------------------------
+<1> By default only those settings that were explicitly set are returned. Setting this to true also returns
+the default settings.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-local]
+--------------------------------------------------
+<1> By default the request goes to the master of the cluster to get the latest results. If local is specified it gets
+the results from whichever node the request goes to.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-masterTimeout]
+--------------------------------------------------
+<1> Timeout to connect to the master node as a `TimeValue`
+<2> Timeout to connect to the master node as a `String`
+
+[[java-rest-high-cluster-get-settings-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute]
+--------------------------------------------------
+<1> Execute the request and get back the response in a `ClusterGetSettingsResponse` object.
+
+[[java-rest-high-cluster-get-settings-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a cluster get settings request requires both the
+`ClusterGetSettingsRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-async]
+--------------------------------------------------
+<1> The `ClusterGetSettingsRequest` to execute and the `ActionListener`
+to use when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `ClusterGetSettingsResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of a failure. The raised exception is provided as an argument
+
+[[java-rest-high-cluster-get-settings-response]]
+==== Cluster Get Settings Response
+
+The returned `ClusterGetSettingsResponse` lets you retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-response]
+--------------------------------------------------
+<1> Get the persistent settings.
+<2> Get the transient settings.
+<3> Get the default settings (returns empty settings if `includeDefaults` was not set to `true`).
+<4> Get the value as a `String` for a particular setting. The order of searching is first in `persistentSettings` then in
+`transientSettings` and finally, if not found in either, in `defaultSettings`.
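An end-to-end sketch tying the pieces of the new page together (illustrative only; assumes a configured `RestHighLevelClient` named `client`):

```java
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest().includeDefaults(true);
ClusterGetSettingsResponse response = client.cluster().getSettings(request, RequestOptions.DEFAULT);
// getSetting(...) falls back persistent -> transient -> defaults, as described above.
String allocationEnable = response.getSetting("cluster.routing.allocation.enable");
```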
diff --git a/docs/java-rest/high-level/indices/analyze.asciidoc b/docs/java-rest/high-level/indices/analyze.asciidoc
new file mode 100644
index 0000000000000..4bffe2f020382
--- /dev/null
+++ b/docs/java-rest/high-level/indices/analyze.asciidoc
@@ -0,0 +1,119 @@
+[[java-rest-high-analyze]]
+=== Analyze API
+
+[[java-rest-high-analyze-request]]
+==== Analyze Request
+
+An `AnalyzeRequest` contains the text to analyze, and one of several options to
+specify how the analysis should be performed.
+
+The simplest version uses a built-in analyzer:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-builtin-request]
+---------------------------------------------------
+<1> The text to include. Multiple strings are treated as a multi-valued field
+<2> A built-in analyzer
+
+You can configure a custom analyzer:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-request]
+---------------------------------------------------
+<1> Configure char filters
+<2> Configure the tokenizer
+<3> Add a built-in token filter
+<4> Configuration for a custom token filter
+<5> Add the custom token filter
+
+You can also build a custom normalizer, by including only char filters and
+token filters:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-custom-normalizer-request]
+---------------------------------------------------
+
+You can analyze text using an analyzer defined in an existing index:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-request]
+---------------------------------------------------
+<1> The index containing the mappings
+<2> The analyzer defined on this index to use
+
+Or you can use a normalizer:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-index-normalizer-request]
+---------------------------------------------------
+<1> The index containing the mappings
+<2> The normalizer defined on this index to use
+
+You can analyze text using the mappings for a particular field in an index:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-field-request]
+---------------------------------------------------
+
+==== Optional arguments
+The following arguments can also optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-explain]
+---------------------------------------------------
+<1> Setting `explain` to true will add further details to the response
+<2> Setting `attributes` allows you to return only token attributes that you are
+interested in
+
+[[java-rest-high-analyze-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-sync]
+---------------------------------------------------
+
+[[java-rest-high-analyze-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of an analyze request requires both the `AnalyzeRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-request-async]
+---------------------------------------------------
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method if the
+execution successfully completed or using the `onFailure` method if it failed.
+
+A typical listener for `AnalyzeResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-execute-listener]
+---------------------------------------------------
+
+[[java-rest-high-analyze-response]]
+==== Analyze Response
+
+The returned `AnalyzeResponse` allows you to retrieve details of the analysis as
+follows:
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-tokens]
+---------------------------------------------------
+<1> `AnalyzeToken` holds information about the individual tokens produced by analysis
+
+If `explain` was set to `true`, then information is instead returned from the `detail()`
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+---------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[analyze-response-detail]
+---------------------------------------------------
+<1> `DetailAnalyzeResponse` holds more detailed information about tokens produced by
+the various substeps in the analysis chain.
\ No newline at end of file
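A fire-and-forget sketch of the async variant using the `ActionListener.wrap` helper instead of an anonymous listener (illustrative fragment; `client` is an assumed `RestHighLevelClient`):

```java
AnalyzeRequest request = new AnalyzeRequest().text("Some text to analyze").analyzer("english");
client.indices().analyzeAsync(request, RequestOptions.DEFAULT,
        ActionListener.wrap(
                response -> response.getTokens().forEach(t -> System.out.println(t.getTerm())),
                Exception::printStackTrace));
```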
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 7ff4b875902d4..2212ef59d3c63 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -81,6 +81,7 @@ Index Management::
 
 Mapping Management::
 * <>
+* <<java-rest-high-analyze>>
 * <>
 
 Alias Management::
@@ -88,6 +89,7 @@ Alias Management::
 * <>
 * <>
 
+include::indices/analyze.asciidoc[]
 include::indices/create_index.asciidoc[]
 include::indices/delete_index.asciidoc[]
 include::indices/indices_exists.asciidoc[]
@@ -118,9 +120,11 @@ include::indices/get_templates.asciidoc[]
 The Java High Level REST Client supports the following Cluster APIs:
 
 * <>
+* <<java-rest-high-cluster-get-settings>>
 * <>
 
 include::cluster/put_settings.asciidoc[]
+include::cluster/get_settings.asciidoc[]
 include::cluster/health.asciidoc[]
 
 == Ingest APIs
diff --git a/docs/painless/index.asciidoc b/docs/painless/index.asciidoc
index abfd4d4f00abe..92e0a33bf1347 100644
--- a/docs/painless/index.asciidoc
+++ b/docs/painless/index.asciidoc
@@ -7,4 +7,6 @@
 include::painless-getting-started.asciidoc[]
 
 include::painless-lang-spec.asciidoc[]
 
-include::painless-api-reference.asciidoc[]
+include::painless-contexts.asciidoc[]
+
+include::painless-api-reference.asciidoc[]
\ No newline at end of file
diff --git a/docs/painless/painless-contexts.asciidoc b/docs/painless/painless-contexts.asciidoc
new file mode 100644
index 0000000000000..ff46f6bd74dde
--- /dev/null
+++ b/docs/painless/painless-contexts.asciidoc
@@ -0,0 +1,58 @@
+[[painless-contexts]]
+== Painless contexts
+
+:es_version: https://www.elastic.co/guide/en/elasticsearch/reference/master
+:xp_version: https://www.elastic.co/guide/en/x-pack/current
+
+A Painless script is evaluated within a context. Each context has values that
+are available as local variables, a whitelist that controls the available
+classes and the methods and fields within those classes (the API), and
+whether, and with what type, a value is returned.
+
+A Painless script is typically executed within one of the contexts in the table
+below. Note this is not necessarily a comprehensive list: custom plugins and
+specialized code may define new ways to use a Painless script.
+
+[options="header",cols="<1,<1,<1"]
+|====
+| Name | Painless Documentation
+       | Elasticsearch Documentation
+| Update | <<painless-update-context, Painless Documentation>>
+       | {es_version}/docs-update.html[Elasticsearch Documentation]
+| Update by query | <<painless-update-by-query-context, Painless Documentation>>
+       | {es_version}/docs-update-by-query.html[Elasticsearch Documentation]
+| Reindex | <<painless-reindex-context, Painless Documentation>>
+       | {es_version}/docs-reindex.html[Elasticsearch Documentation]
+| Sort | <<painless-sort-context, Painless Documentation>>
+       | {es_version}/search-request-sort.html[Elasticsearch Documentation]
+| Similarity | <<painless-similarity-context, Painless Documentation>>
+       | {es_version}/index-modules-similarity.html[Elasticsearch Documentation]
+| Weight | <<painless-weight-context, Painless Documentation>>
+       | {es_version}/index-modules-similarity.html[Elasticsearch Documentation]
+| Score | <<painless-score-context, Painless Documentation>>
+       | {es_version}/query-dsl-function-score-query.html[Elasticsearch Documentation]
+| Field | <<painless-field-context, Painless Documentation>>
+       | {es_version}/search-request-script-fields.html[Elasticsearch Documentation]
+| Filter | <<painless-filter-context, Painless Documentation>>
+       | {es_version}/query-dsl-script-query.html[Elasticsearch Documentation]
+| Minimum should match | <<painless-min-should-match-context, Painless Documentation>>
+       | {es_version}/query-dsl-terms-set-query.html[Elasticsearch Documentation]
+| Metric aggregation initialization | <<painless-metric-agg-init-context, Painless Documentation>>
+       | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
+| Metric aggregation map | <<painless-metric-agg-map-context, Painless Documentation>>
+       | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
+| Metric aggregation combine | <<painless-metric-agg-combine-context, Painless Documentation>>
+       | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
+| Metric aggregation reduce | <<painless-metric-agg-reduce-context, Painless Documentation>>
+       | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation]
+| Bucket aggregation | <<painless-bucket-agg-context, Painless Documentation>>
+       | {es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation]
+| Ingest processor | <<painless-ingest-processor-context, Painless Documentation>>
+       | {es_version}/script-processor.html[Elasticsearch Documentation]
+| Watcher condition | <<painless-watcher-condition-context, Painless Documentation>>
+       | {xp_version}/condition-script.html[Elasticsearch Documentation]
+| Watcher transform | <<painless-watcher-transform-context, Painless Documentation>>
+       | {xp_version}/transform-script.html[Elasticsearch Documentation]
+|====
+
+include::painless-contexts/index.asciidoc[]
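To make the table above concrete, a sketch of one of these contexts driven from Java (illustrative only, not part of the docs change; index, field, and parameter names are made up):

```java
// Update context: the script sees the document as ctx._source and user params as params.
UpdateRequest update = new UpdateRequest("players", "_doc", "1")
        .script(new Script(ScriptType.INLINE, "painless",
                "ctx._source.height += params.delta",
                Collections.singletonMap("delta", 0.5)));
```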
+ +[options="header",cols="<1,<1,<1"] +|==== +| Name | Painless Documentation + | Elasticsearch Documentation +| Update | <> + | {es_version}/docs-update.html[Elasticsearch Documentation] +| Update by query | <> + | {es_version}/docs-update-by-query.html[Elasticsearch Documentation] +| Reindex | <> + | {es_version}/docs-reindex.html[Elasticsearch Documentation] +| Sort | <> + | {es_version}/search-request-sort.html[Elasticsearch Documentation] +| Similarity | <> + | {es_version}/index-modules-similarity.html[Elasticsearch Documentation] +| Weight | <> + | {es_version}/index-modules-similarity.html[Elasticsearch Documentation] +| Score | <> + | {es_version}/query-dsl-function-score-query.html[Elasticsearch Documentation] +| Field | <> + | {es_version}/search-request-script-fields.html[Elasticsearch Documentation] +| Filter | <> + | {es_version}/query-dsl-script-query.html[Elasticsearch Documentation] +| Minimum should match | <> + | {es_version}/query-dsl-terms-set-query.html[Elasticsearch Documentation] +| Metric aggregation initialization | <> + | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation] +| Metric aggregation map | <> + | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation] +| Metric aggregation combine | <> + | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation] +| Metric aggregation reduce | <> + | {es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation] +| Bucket aggregation | <> + | {es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation] +| Ingest processor | <> + | {es_version}/script-processor.html[Elasticsearch Documentation] +| Watcher condition | <> + | {xp_version}/condition-script.html[Elasticsearch Documentation] +| Watcher transform | <> + | {xp_version}/transform-script.html[Elasticsearch Documentation] +|==== + +include::painless-contexts/index.asciidoc[] diff --git a/docs/painless/painless-contexts/index.asciidoc b/docs/painless/painless-contexts/index.asciidoc new file mode 100644 index 0000000000000..64e4326e052f2 --- /dev/null +++ b/docs/painless/painless-contexts/index.asciidoc @@ -0,0 +1,35 @@ +include::painless-update-context.asciidoc[] + +include::painless-update-by-query-context.asciidoc[] + +include::painless-reindex-context.asciidoc[] + +include::painless-sort-context.asciidoc[] + +include::painless-similarity-context.asciidoc[] + +include::painless-weight-context.asciidoc[] + +include::painless-score-context.asciidoc[] + +include::painless-field-context.asciidoc[] + +include::painless-filter-context.asciidoc[] + +include::painless-min-should-match-context.asciidoc[] + +include::painless-metric-agg-init-context.asciidoc[] + +include::painless-metric-agg-map-context.asciidoc[] + +include::painless-metric-agg-combine-context.asciidoc[] + +include::painless-metric-agg-reduce-context.asciidoc[] + +include::painless-bucket-agg-context.asciidoc[] + +include::painless-ingest-processor-context.asciidoc[] + +include::painless-watcher-condition-context.asciidoc[] + +include::painless-watcher-transform-context.asciidoc[] diff --git a/docs/painless/painless-contexts/painless-bucket-agg-context.asciidoc b/docs/painless/painless-contexts/painless-bucket-agg-context.asciidoc new file mode 100644 index 0000000000000..b277055d87d8b --- /dev/null +++ b/docs/painless/painless-contexts/painless-bucket-agg-context.asciidoc @@ -0,0 
+1,21 @@ +[[painless-bucket-agg-context]] +=== Bucket aggregation context + +Use a Painless script in an +{es_version}/search-aggregations-pipeline-bucket-script-aggregation.html[bucket aggregation] +to calculate a value as a result in a bucket. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. The parameters + include values defined as part of the `buckets_path`. + +*Return* + +numeric:: + The calculated value as the result. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-field-context.asciidoc b/docs/painless/painless-contexts/painless-field-context.asciidoc new file mode 100644 index 0000000000000..bf44703001bc0 --- /dev/null +++ b/docs/painless/painless-contexts/painless-field-context.asciidoc @@ -0,0 +1,31 @@ +[[painless-field-context]] +=== Field context + +Use a Painless script to create a +{es_version}/search-request-script-fields.html[script field] to return +a customized value for each document in the results of a query. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the specified document where each field is a + `List` of values. + +{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`):: + Contains extracted JSON in a `Map` and `List` structure for the fields + existing in a stored document. + +`_score` (`double`, read-only):: + The original score of the specified document. + +*Return* + +`Object`:: + The customized value for each document. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-filter-context.asciidoc b/docs/painless/painless-contexts/painless-filter-context.asciidoc new file mode 100644 index 0000000000000..ea0393893c882 --- /dev/null +++ b/docs/painless/painless-contexts/painless-filter-context.asciidoc @@ -0,0 +1,26 @@ +[[painless-filter-context]] +=== Filter context + +Use a Painless script as a {es_version}/query-dsl-script-query.html[filter] in a +query to include and exclude documents. + + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +*Return* + +`boolean`:: + Return `true` if the current document should be returned as a result of + the query, and `false` otherwise. + + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc new file mode 100644 index 0000000000000..ba3be0739631f --- /dev/null +++ b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc @@ -0,0 +1,41 @@ +[[painless-ingest-processor-context]] +=== Ingest processor context + +Use a Painless script in an {es_version}/script-processor.html[ingest processor] +to modify documents upon insertion. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`):: + The name of the index. + +{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`):: + The type of document within an index.
+ +`ctx` (`Map`):: + Contains extracted JSON in a `Map` and `List` structure for the fields + that are part of the document. + +*Side Effects* + +{es_version}/mapping-index-field.html[`ctx['_index']`]:: + Modify this to change the destination index for the current document. + +{es_version}/mapping-type-field.html[`ctx['_type']`]:: + Modify this to change the type for the current document. + +`ctx` (`Map`):: + Modify the values in the `Map/List` structure to add, modify, or delete + the fields of a document. + +*Return* + +`void`:: + No expected return value. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc new file mode 100644 index 0000000000000..1fec63ef4466f --- /dev/null +++ b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc @@ -0,0 +1,27 @@ +[[painless-metric-agg-combine-context]] +=== Metric aggregation combine context + +Use a Painless script to +{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[combine] +values for use in a scripted metric aggregation. A combine script is run once +per shard following a <> and is +optional as part of a full metric aggregation. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`params['_agg']` (`Map`):: + `Map` with values available from the prior map script. + +*Return* + +`List`, `Map`, `String`, or primitive:: + A value collected for use in a + <>. If no reduce + script is specified, the value is used as part of the result. + +*API* + +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc new file mode 100644 index 0000000000000..ed7e01ddd003a --- /dev/null +++ b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc @@ -0,0 +1,32 @@ +[[painless-metric-agg-init-context]] +=== Metric aggregation initialization context + +Use a Painless script to +{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[initialize] +values for use in a scripted metric aggregation. An initialization script is +run prior to document collection once per shard and is optional as part of the +full metric aggregation. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`params['_agg']` (`Map`):: + Empty `Map` used to add values for use in a + <>. + +*Side Effects* + +`params['_agg']` (`Map`):: + Add values to this `Map` for use in a map script. Additional values must + be of the type `Map`, `List`, `String` or primitive. + +*Return* + +`void`:: + No expected return value. + +*API* + +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc new file mode 100644 index 0000000000000..51f06e010db35 --- /dev/null +++ b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc @@ -0,0 +1,47 @@ +[[painless-metric-agg-map-context]] +=== Metric aggregation map context + +Use a Painless script to +{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[map] +values for use in a scripted metric aggregation.
A map script is run once per +collected document following an optional +<> and is required as +part of a full metric aggregation. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`params['_agg']` (`Map`):: + `Map` used to add values for processing in a + <> or returned + directly. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +`_score` (`double`, read-only):: + The similarity score of the current document. + +*Side Effects* + +`params['_agg']` (`Map`):: + Use this `Map` to add values for processing in a combine script. + Additional values must be of the type `Map`, `List`, `String` or + primitive. If an initialization script is provided as part of the + aggregation, then values added from the initialization script are + available as well. If no combine script is specified, values must be + directly stored in `_agg`. If no combine script and no + <> are specified, the + values are used as the result. + +*Return* + +`void`:: + No expected return value. + +*API* + +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc new file mode 100644 index 0000000000000..1b64b85392d26 --- /dev/null +++ b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc @@ -0,0 +1,28 @@ +[[painless-metric-agg-reduce-context]] +=== Metric aggregation reduce context + +Use a Painless script to +{es_version}/search-aggregations-metrics-scripted-metric-aggregation.html[reduce] +values to produce the result of a scripted metric aggregation. A reduce script +is run once on the coordinating node following a +<> (or a +<> if no combine script is +specified) and is optional as part of a full metric aggregation. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`params['_aggs']` (`Map`):: + `Map` with values available from the prior combine script (or a map + script if no combine script is specified). + +*Return* + +`List`, `Map`, `String`, or primitive:: + A value used as the result. + +*API* + +The standard <> is available. diff --git a/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc b/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc new file mode 100644 index 0000000000000..c310f42928eb4 --- /dev/null +++ b/docs/painless/painless-contexts/painless-min-should-match-context.asciidoc @@ -0,0 +1,28 @@ +[[painless-min-should-match-context]] +=== Minimum should match context + +Use a Painless script to specify the +{es_version}/query-dsl-terms-set-query.html[minimum] number of terms that a +specified field needs to match for a document to be part of the query +results. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`params['num_terms']` (`int`, read-only):: + The number of terms specified to match with. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +*Return* + +`int`:: + The minimum number of terms required to match the current document. + +*API* + +The standard <> is available.
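+
+As an illustrative sketch only (the keyword field `genre` and the cap of two
+required terms are assumptions, not part of the official examples), a `terms_set`
+query using this context might look like:
+
+[source,js]
+--------------------------------------------------
+GET /_search
+{
+  "query": {
+    "terms_set": {
+      "genre": {
+        "terms": ["comedy", "drama", "romance"],
+        "minimum_should_match_script": {
+          "source": "Math.min(params.num_terms, 2)"
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE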
\ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-reindex-context.asciidoc b/docs/painless/painless-contexts/painless-reindex-context.asciidoc new file mode 100644 index 0000000000000..a8477c8c61996 --- /dev/null +++ b/docs/painless/painless-contexts/painless-reindex-context.asciidoc @@ -0,0 +1,68 @@ +[[painless-reindex-context]] +=== Reindex context + +Use a Painless script in a {es_version}/docs-reindex.html[reindex] operation to +add, modify, or delete fields within each document in an original index as it is +reindexed into a target index. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`ctx['_op']` (`String`):: + The name of the operation. + +{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`):: + The value used to select a shard for document storage. + +{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`):: + The name of the index. + +{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`):: + The type of document within an index. + +{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`):: + The unique document id. + +`ctx['_version']` (`int`):: + The current version of the document. + +{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`):: + Contains extracted JSON in a `Map` and `List` structure for the fields + existing in a stored document. + +*Side Effects* + +`ctx['_op']`:: + Use the default of `index` to update a document. Set to `none` to + specify no operation or `delete` to delete the current document from + the index. + +{es_version}/mapping-routing-field.html[`ctx['_routing']`]:: + Modify this to change the routing value for the current document. + +{es_version}/mapping-index-field.html[`ctx['_index']`]:: + Modify this to change the destination index for the current document. + +{es_version}/mapping-type-field.html[`ctx['_type']`]:: + Modify this to change the type for the current document. + +{es_version}/mapping-id-field.html[`ctx['_id']`]:: + Modify this to change the id for the current document. + +`ctx['_version']` (`int`):: + Modify this to change the version for the current document. + +{es_version}/mapping-source-field.html[`ctx['_source']`]:: + Modify the values in the `Map/List` structure to add, modify, or delete + the fields of a document. + +*Return* + +`void`:: + No expected return value. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-score-context.asciidoc b/docs/painless/painless-contexts/painless-score-context.asciidoc new file mode 100644 index 0000000000000..21667fd31f3b1 --- /dev/null +++ b/docs/painless/painless-contexts/painless-score-context.asciidoc @@ -0,0 +1,27 @@ +[[painless-score-context]] +=== Score context + +Use a Painless script in a +{es_version}/query-dsl-function-score-query.html[function score] to apply a new +score to documents returned from a query. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +`_score` (`double`, read-only):: + The similarity score of the current document. + +*Return* + +`double`:: + The score for the current document. + +*API* + +The standard <> is available.
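+
+For illustration only (the `title` and `popularity` fields and the `max`
+parameter are made up for this sketch), a function score query using this
+context might look like:
+
+[source,js]
+--------------------------------------------------
+GET /_search
+{
+  "query": {
+    "function_score": {
+      "query": { "match": { "title": "dune" } },
+      "script_score": {
+        "script": {
+          "source": "_score * doc['popularity'].value / params.max",
+          "params": { "max": 100 }
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE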
\ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-similarity-context.asciidoc b/docs/painless/painless-contexts/painless-similarity-context.asciidoc new file mode 100644 index 0000000000000..052844c3111a7 --- /dev/null +++ b/docs/painless/painless-contexts/painless-similarity-context.asciidoc @@ -0,0 +1,48 @@ +[[painless-similarity-context]] +=== Similarity context + +Use a Painless script to create a +{es_version}/index-modules-similarity.html[similarity] equation for scoring +documents in a query. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`query.boost` (`float`, read-only):: + The boost value if provided by the query. If this is not provided the + value is `1.0f`. + +`field.docCount` (`long`, read-only):: + The number of documents that have a value for the current field. + +`field.sumDocFreq` (`long`, read-only):: + The sum of all terms that exist for the current field. If this is not + available the value is `-1`. + +`field.sumTotalTermFreq` (`long`, read-only):: + The sum of occurrences in the index for all the terms that exist in the + current field. If this is not available the value is `-1`. + +`term.docFreq` (`long`, read-only):: + The number of documents that contain the current term in the index. + +`term.totalTermFreq` (`long`, read-only):: + The total occurrences of the current term in the index. + +`doc.length` (`long`, read-only):: + The number of tokens the current document has in the current field. + +`doc.freq` (`long`, read-only):: + The number of occurrences of the current term in the current + document for the current field. + +*Return* + +`double`:: + The similarity score for the current document. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-sort-context.asciidoc b/docs/painless/painless-contexts/painless-sort-context.asciidoc new file mode 100644 index 0000000000000..7f510fb6a9251 --- /dev/null +++ b/docs/painless/painless-contexts/painless-sort-context.asciidoc @@ -0,0 +1,26 @@ +[[painless-sort-context]] +=== Sort context + +Use a Painless script to +{es_version}/search-request-sort.html[sort] the documents in a query. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`doc` (`Map`, read-only):: + Contains the fields of the current document where each field is a + `List` of values. + +`_score` (`double`, read-only):: + The similarity score of the current document. + +*Return* + +`double`:: + The score for the specified document. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc new file mode 100644 index 0000000000000..65666e15844bf --- /dev/null +++ b/docs/painless/painless-contexts/painless-update-by-query-context.asciidoc @@ -0,0 +1,54 @@ +[[painless-update-by-query-context]] +=== Update by query context + +Use a Painless script in an +{es_version}/docs-update-by-query.html[update by query] operation to add, +modify, or delete fields within each of a set of documents collected as the +result of a query. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query.
+ +`ctx['_op']` (`String`):: + The name of the operation. + +{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`, read-only):: + The value used to select a shard for document storage. + +{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only):: + The name of the index. + +{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`, read-only):: + The type of document within an index. + +{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: + The unique document id. + +`ctx['_version']` (`int`, read-only):: + The current version of the document. + +{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`):: + Contains extracted JSON in a `Map` and `List` structure for the fields + existing in a stored document. + +*Side Effects* + +`ctx['_op']`:: + Use the default of `index` to update a document. Set to `none` to + specify no operation or `delete` to delete the current document from + the index. + +{es_version}/mapping-source-field.html[`ctx['_source']`]:: + Modify the values in the `Map/List` structure to add, modify, or delete + the fields of a document. + +*Return* + +`void`:: + No expected return value. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-update-context.asciidoc b/docs/painless/painless-contexts/painless-update-context.asciidoc new file mode 100644 index 0000000000000..b04ba8d9ffb56 --- /dev/null +++ b/docs/painless/painless-contexts/painless-update-context.asciidoc @@ -0,0 +1,55 @@ +[[painless-update-context]] +=== Update context + +Use a Painless script in an {es_version}/docs-update.html[update] operation to +add, modify, or delete fields within a single document. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`ctx['_op']` (`String`):: + The name of the operation. + +{es_version}/mapping-routing-field.html[`ctx['_routing']`] (`String`, read-only):: + The value used to select a shard for document storage. + +{es_version}/mapping-index-field.html[`ctx['_index']`] (`String`, read-only):: + The name of the index. + +{es_version}/mapping-type-field.html[`ctx['_type']`] (`String`, read-only):: + The type of document within an index. + +{es_version}/mapping-id-field.html[`ctx['_id']`] (`int`, read-only):: + The unique document id. + +`ctx['_version']` (`int`, read-only):: + The current version of the document. + +`ctx['_now']` (`long`, read-only):: + The current timestamp in milliseconds. + +{es_version}/mapping-source-field.html[`ctx['_source']`] (`Map`):: + Contains extracted JSON in a `Map` and `List` structure for the fields + existing in a stored document. + +*Side Effects* + +`ctx['_op']`:: + Use the default of `index` to update a document. Set to `none` to + specify no operation or `delete` to delete the current document from + the index. + +{es_version}/mapping-source-field.html[`ctx['_source']`]:: + Modify the values in the `Map/List` structure to add, modify, or delete + the fields of a document. + +*Return* + +`void`:: + No expected return value. + +*API* + +The standard <> is available. 
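+
+To make the variables above concrete, here is a minimal sketch of an update
+request using this context (the index, document id, `counter` field, and `count`
+parameter are illustrative):
+
+[source,js]
+--------------------------------------------------
+POST test/_doc/1/_update
+{
+  "script": {
+    "source": "ctx._source.counter += params.count",
+    "params": { "count": 4 }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+
+Within the script, the `ctx` variables described above can be accessed with
+either map syntax (`ctx['_source']`) or shorthand field syntax (`ctx._source`).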
\ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc b/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc new file mode 100644 index 0000000000000..3a5e460a55de7 --- /dev/null +++ b/docs/painless/painless-contexts/painless-watcher-condition-context.asciidoc @@ -0,0 +1,38 @@ +[[painless-watcher-condition-context]] +=== Watcher condition context + +Use a Painless script as a {xp_version}/condition-script.html[watcher condition] +to test if a response is necessary. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`ctx['watch_id']` (`String`, read-only):: + The id of the watch. + +`ctx['execution_time']` (`DateTime`, read-only):: + The start time for the watch. + +`ctx['trigger']['scheduled_time']` (`DateTime`, read-only):: + The scheduled trigger time for the watch. + +`ctx['trigger']['triggered_time']` (`DateTime`, read-only):: + The actual trigger time for the watch. + +`ctx['metadata']` (`Map`, read-only):: + Any metadata associated with the watch. + +`ctx['payload']` (`Map`, read-only):: + The accessible watch data based upon the + {xp_version}/input.html[watch input]. + +*Return* + +`boolean`:: + Expects `true` if the condition is met, and `false` otherwise. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc b/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc new file mode 100644 index 0000000000000..1831da5a9f87b --- /dev/null +++ b/docs/painless/painless-contexts/painless-watcher-transform-context.asciidoc @@ -0,0 +1,39 @@ +[[painless-watcher-transform-context]] +=== Watcher transform context + +Use a Painless script to {xp_version}/transform-script.html[transform] watch +data into a new payload for use in a response to a condition. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`ctx['watch_id']` (`String`, read-only):: + The id of the watch. + +`ctx['execution_time']` (`DateTime`, read-only):: + The start time for the watch. + +`ctx['trigger']['scheduled_time']` (`DateTime`, read-only):: + The scheduled trigger time for the watch. + +`ctx['trigger']['triggered_time']` (`DateTime`, read-only):: + The actual trigger time for the watch. + +`ctx['metadata']` (`Map`, read-only):: + Any metadata associated with the watch. + +`ctx['payload']` (`Map`, read-only):: + The accessible watch data based upon the + {xp_version}/input.html[watch input]. + + +*Return* + +`Object`:: + The new payload. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-weight-context.asciidoc b/docs/painless/painless-contexts/painless-weight-context.asciidoc new file mode 100644 index 0000000000000..0aef936183c0f --- /dev/null +++ b/docs/painless/painless-contexts/painless-weight-context.asciidoc @@ -0,0 +1,42 @@ +[[painless-weight-context]] +=== Weight context + +Use a Painless script to create a +{es_version}/index-modules-similarity.html[weight] for use in a +<>. Weight is used to prevent +recalculation of constants that remain the same across documents. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`query.boost` (`float`, read-only):: + The boost value if provided by the query. If this is not provided the + value is `1.0f`. 
+ +`field.docCount` (`long`, read-only):: + The number of documents that have a value for the current field. + +`field.sumDocFreq` (`long`, read-only):: + The sum of all terms that exist for the current field. If this is not + available the value is `-1`. + +`field.sumTotalTermFreq` (`long`, read-only):: + The sum of occurrences in the index for all the terms that exist in the + current field. If this is not available the value is `-1`. + +`term.docFreq` (`long`, read-only):: + The number of documents that contain the current term in the index. + +`term.totalTermFreq` (`long`, read-only):: + The total occurrences of the current term in the index. + +*Return* + +`double`:: + A scoring factor used across all documents. + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/painless/painless-operators-array.asciidoc b/docs/painless/painless-operators-array.asciidoc index e91c07acef5c0..acfb87d30af1b 100644 --- a/docs/painless/painless-operators-array.asciidoc +++ b/docs/painless/painless-operators-array.asciidoc @@ -184,7 +184,7 @@ brace_access: '[' expression ']' store `def` to `x` <5> declare `def y`; implicit cast `int 1` to `def` -> `def`; - store `def ` to `y`; + store `def` to `y`; <6> declare `int i`; load from `d` -> `def` implicit cast `def` to `1-d int array reference` diff --git a/docs/painless/painless-scripts.asciidoc b/docs/painless/painless-scripts.asciidoc index 87e5b60159060..81fdbbe7367db 100644 --- a/docs/painless/painless-scripts.asciidoc +++ b/docs/painless/painless-scripts.asciidoc @@ -3,4 +3,4 @@ Scripts are composed of one-to-many <> and are run in a sandbox that determines what local variables are immediately available -along with what APIs are whitelisted for use. \ No newline at end of file +along with what APIs are whitelisted for use. diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index d4061fc5d74fb..fac44056fb4a4 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -40,11 +40,15 @@ Those that must be stored in the keystore are marked as `Secure`. `access_key`:: - An s3 access key. The `secret_key` setting must also be specified. (Secure) + An ec2 access key. The `secret_key` setting must also be specified. (Secure) `secret_key`:: - An s3 secret key. The `access_key` setting must also be specified. (Secure) + An ec2 secret key. The `access_key` setting must also be specified. (Secure) + +`session_token`:: + An ec2 session token. The `access_key` and `secret_key` settings must also + be specified. (Secure) `endpoint`:: diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index 8cbec8ce4f2c6..e91a8a9de73d0 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -27,9 +27,9 @@ To create a new bucket: 1. Connect to the https://console.cloud.google.com/[Google Cloud Platform Console] 2. Select your project -3. Got to the https://console.cloud.google.com/storage/browser[Storage Browser] +3. Go to the https://console.cloud.google.com/storage/browser[Storage Browser] 4. Click the "Create Bucket" button -5. Enter a the name of the new bucket +5. Enter the name of the new bucket 6. Select a storage class 7. Select a location 8. 
Click the "Create" button diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index 6701d53c24047..0d73e35f18ec3 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -73,6 +73,10 @@ are marked as `Secure`. An s3 secret key. The `access_key` setting must also be specified. (Secure) +`session_token`:: + An s3 session token. The `access_key` and `secret_key` settings must also + be specified. (Secure) + `endpoint`:: The s3 service endpoint to connect to. This will be automatically diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index 4ca9c849b9b61..1903bbc6bcadf 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -69,8 +69,8 @@ percentiles: `[ 1, 5, 25, 50, 75, 95, 99 ]`. The response will look like this: As you can see, the aggregation will return a calculated value for each percentile in the default range. If we assume response times are in milliseconds, it is -immediately obvious that the webpage normally loads in 10-723ms, but occasionally -spikes to 941-980ms. +immediately obvious that the webpage normally loads in 10-725ms, but occasionally +spikes to 945-985ms. Often, administrators are only interested in outliers -- the extreme percentiles. We can specify just the percents we are interested in (requested percentiles diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 1a4d6d4774c49..c4857699f9805 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -15,10 +15,10 @@ POST ledger/_search?size=0 "aggs": { "profit": { "scripted_metric": { - "init_script" : "params._agg.transactions = []", - "map_script" : "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> - "combine_script" : "double profit = 0; for (t in params._agg.transactions) { profit += t } return profit", - "reduce_script" : "double profit = 0; for (a in params._aggs) { profit += a } return profit" + "init_script" : "state.transactions = []", + "map_script" : "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> + "combine_script" : "double profit = 0; for (t in state.transactions) { profit += t } return profit", + "reduce_script" : "double profit = 0; for (a in states) { profit += a } return profit" } } } @@ -67,8 +67,7 @@ POST ledger/_search?size=0 "id": "my_combine_script" }, "params": { - "field": "amount", <1> - "_agg": {} <2> + "field": "amount" <1> }, "reduce_script" : { "id": "my_reduce_script" @@ -82,8 +81,7 @@ POST ledger/_search?size=0 // TEST[setup:ledger,stored_scripted_metric_script] <1> script parameters for `init`, `map` and `combine` scripts must be specified -in a global `params` object so that it can be share between the scripts. -<2> if you specify script parameters then you must specify `"_agg": {}`. +in a global `params` object so that it can be shared between the scripts. //// Verify this response as well but in a hidden block. 
@@ -108,7 +106,7 @@ For more details on specifying scripts see <> +* <> * <> * <> * <> diff --git a/docs/reference/release-notes/6.3.asciidoc b/docs/reference/release-notes/6.3.asciidoc index ee30670acfd4a..9235db16dee03 100644 --- a/docs/reference/release-notes/6.3.asciidoc +++ b/docs/reference/release-notes/6.3.asciidoc @@ -1,3 +1,8 @@ +[[release-notes-6.3.1]] +== {es} version 6.3.1 + +coming[6.3.1] + [[release-notes-6.3.0]] == {es} version 6.3.0 diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index 97f31c0db7a08..596e404505ac5 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -261,7 +261,6 @@ type:: The highlighter to use: `unified`, `plain`, or `fvh`. Defaults to * <> * <> * <> -* <> * <> * <> * <> @@ -485,32 +484,6 @@ GET /_search // CONSOLE // TEST[setup:twitter] - -[[highlight-all]] -[float] -=== Highlight in all fields - -By default, only fields that contains a query match are highlighted. Set -`require_field_match` to `false` to highlight all fields. - -[source,js] --------------------------------------------------- -GET /_search -{ - "query" : { - "match": { "user": "kimchy" } - }, - "highlight" : { - "require_field_match": false, - "fields": { - "_all" : { "pre_tags" : [""], "post_tags" : [""] } - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[setup:twitter] - [[matched-fields]] [float] === Combine matches on multiple fields diff --git a/docs/reference/search/request/rescore.asciidoc b/docs/reference/search/request/rescore.asciidoc index 6e1bb2a9e6ce2..58d459a9456de 100644 --- a/docs/reference/search/request/rescore.asciidoc +++ b/docs/reference/search/request/rescore.asciidoc @@ -29,8 +29,7 @@ The query rescorer executes a second query only on the Top-K results returned by the <> and <> phases. The number of docs which will be examined on each shard can be controlled by -the `window_size` parameter, which defaults to -<>. +the `window_size` parameter, which defaults to 10. By default the scores from the original query and the rescore query are combined linearly to produce the final `_score` for each document. The diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index b8516bdc6cb5c..60e3c1dac2948 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -55,4 +55,6 @@ include::setup/sysconfig.asciidoc[] include::setup/bootstrap-checks.asciidoc[] +include::setup/starting.asciidoc[] + include::setup/stopping.asciidoc[] diff --git a/docs/reference/setup/install/deb-init.asciidoc b/docs/reference/setup/install/deb-init.asciidoc new file mode 100644 index 0000000000000..0e6e142a82927 --- /dev/null +++ b/docs/reference/setup/install/deb-init.asciidoc @@ -0,0 +1,20 @@ +==== Running Elasticsearch with SysV `init` + +Use the `update-rc.d` command to configure Elasticsearch to start automatically +when the system boots up: + +[source,sh] +-------------------------------------------------- +sudo update-rc.d elasticsearch defaults 95 10 +-------------------------------------------------- + +Elasticsearch can be started and stopped using the `service` command: + +[source,sh] +-------------------------------------------- +sudo -i service elasticsearch start +sudo -i service elasticsearch stop +-------------------------------------------- + +If Elasticsearch fails to start for any reason, it will print the reason for +failure to STDOUT. 
Log files can be found in `/var/log/elasticsearch/`. \ No newline at end of file diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index 2abacf947c7bc..629abe37afe62 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -143,29 +143,12 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] +==== SysV `init` vs `systemd` + include::init-systemd.asciidoc[] [[deb-running-init]] -==== Running Elasticsearch with SysV `init` - -Use the `update-rc.d` command to configure Elasticsearch to start automatically -when the system boots up: - -[source,sh] --------------------------------------------------- -sudo update-rc.d elasticsearch defaults 95 10 --------------------------------------------------- - -Elasticsearch can be started and stopped using the `service` command: - -[source,sh] --------------------------------------------- -sudo -i service elasticsearch start -sudo -i service elasticsearch stop --------------------------------------------- - -If Elasticsearch fails to start for any reason, it will print the reason for -failure to STDOUT. Log files can be found in `/var/log/elasticsearch/`. +include::deb-init.asciidoc[] [[deb-running-systemd]] include::systemd.asciidoc[] diff --git a/docs/reference/setup/install/init-systemd.asciidoc b/docs/reference/setup/install/init-systemd.asciidoc index 1532c5313aefd..144fe4c481275 100644 --- a/docs/reference/setup/install/init-systemd.asciidoc +++ b/docs/reference/setup/install/init-systemd.asciidoc @@ -1,5 +1,3 @@ -==== SysV `init` vs `systemd` - Elasticsearch is not started automatically after installation. How to start and stop Elasticsearch depends on whether your system uses SysV `init` or `systemd` (used by newer distributions). You can tell which is being used by diff --git a/docs/reference/setup/install/msi-windows-start.asciidoc b/docs/reference/setup/install/msi-windows-start.asciidoc new file mode 100644 index 0000000000000..28bcfed6af3de --- /dev/null +++ b/docs/reference/setup/install/msi-windows-start.asciidoc @@ -0,0 +1,16 @@ +==== Running Elasticsearch from the command line + +Once installed, Elasticsearch can be started from the command line, if not installed as a service +and configured to start when installation completes, as follows: + +["source","sh",subs="attributes,callouts"] +-------------------------------------------- +.\bin\elasticsearch.exe +-------------------------------------------- + +The command line terminal will display output similar to the following: + +image::images/msi_installer/elasticsearch_exe.png[] + +By default, Elasticsearch runs in the foreground, prints its logs to `STDOUT` in addition +to the `.log` file within `LOGSDIRECTORY`, and can be stopped by pressing `Ctrl-C`. 
diff --git a/docs/reference/setup/install/rpm-init.asciidoc b/docs/reference/setup/install/rpm-init.asciidoc new file mode 100644 index 0000000000000..a3db166308caf --- /dev/null +++ b/docs/reference/setup/install/rpm-init.asciidoc @@ -0,0 +1,20 @@ +==== Running Elasticsearch with SysV `init` + +Use the `chkconfig` command to configure Elasticsearch to start automatically +when the system boots up: + +[source,sh] +-------------------------------------------------- +sudo chkconfig --add elasticsearch +-------------------------------------------------- + +Elasticsearch can be started and stopped using the `service` command: + +[source,sh] +-------------------------------------------- +sudo -i service elasticsearch start +sudo -i service elasticsearch stop +-------------------------------------------- + +If Elasticsearch fails to start for any reason, it will print the reason for +failure to STDOUT. Log files can be found in `/var/log/elasticsearch/`. diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index aad7cf5bf3b73..a6f106497e9d2 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -130,30 +130,12 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] +==== SysV `init` vs `systemd` + include::init-systemd.asciidoc[] [[rpm-running-init]] -==== Running Elasticsearch with SysV `init` - -Use the `chkconfig` command to configure Elasticsearch to start automatically -when the system boots up: - -[source,sh] --------------------------------------------------- -sudo chkconfig --add elasticsearch --------------------------------------------------- - -Elasticsearch can be started and stopped using the `service` command: - -[source,sh] --------------------------------------------- -sudo -i service elasticsearch start -sudo -i service elasticsearch stop --------------------------------------------- - -If Elasticsearch fails to start for any reason, it will print the reason for -failure to STDOUT. Log files can be found in `/var/log/elasticsearch/`. - +include::rpm-init.asciidoc[] [[rpm-running-systemd]] include::systemd.asciidoc[] diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index 56bb953c18ebb..1535e5415e4a4 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -342,23 +342,7 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] [[msi-installer-command-line-running]] -==== Running Elasticsearch from the command line - -Once installed, Elasticsearch can be started from the command line, if not installed as a service -and configured to start when installation completes, as follows: - -["source","sh",subs="attributes,callouts"] --------------------------------------------- -.\bin\elasticsearch.exe --------------------------------------------- - -The command line terminal will display output similar to the following: - -[[msi-installer-elasticsearch-exe]] -image::images/msi_installer/elasticsearch_exe.png[] - -By default, Elasticsearch runs in the foreground, prints its logs to `STDOUT` in addition -to the `.log` file within `LOGSDIRECTORY`, and can be stopped by pressing `Ctrl-C`. 
+include::msi-windows-start.asciidoc[] [[msi-installer-command-line-configuration]] ==== Configuring Elasticsearch on the command line diff --git a/docs/reference/setup/install/zip-targz-daemon.asciidoc b/docs/reference/setup/install/zip-targz-daemon.asciidoc new file mode 100644 index 0000000000000..31d9c3c2e7437 --- /dev/null +++ b/docs/reference/setup/install/zip-targz-daemon.asciidoc @@ -0,0 +1,21 @@ +==== Running as a daemon + +To run Elasticsearch as a daemon, specify `-d` on the command line, and record +the process ID in a file using the `-p` option: + +[source,sh] +-------------------------------------------- +./bin/elasticsearch -d -p pid +-------------------------------------------- + +Log messages can be found in the `$ES_HOME/logs/` directory. + +To shut down Elasticsearch, kill the process ID recorded in the `pid` file: + +[source,sh] +-------------------------------------------- +kill `cat pid` +-------------------------------------------- + +NOTE: The startup scripts provided in the <> and <> +packages take care of starting and stopping the Elasticsearch process for you. diff --git a/docs/reference/setup/install/zip-targz-start.asciidoc b/docs/reference/setup/install/zip-targz-start.asciidoc new file mode 100644 index 0000000000000..907b2a7317d79 --- /dev/null +++ b/docs/reference/setup/install/zip-targz-start.asciidoc @@ -0,0 +1,17 @@ +==== Running Elasticsearch from the command line + +Elasticsearch can be started from the command line as follows: + +[source,sh] +-------------------------------------------- +./bin/elasticsearch +-------------------------------------------- + +By default, Elasticsearch runs in the foreground, prints its logs to the +standard output (`stdout`), and can be stopped by pressing `Ctrl-C`. + +NOTE: All scripts packaged with Elasticsearch require a version of Bash +that supports arrays and assume that Bash is available at `/bin/bash`. +As such, Bash should be available at this path either directly or via a +symbolic link. + diff --git a/docs/reference/setup/install/zip-targz.asciidoc b/docs/reference/setup/install/zip-targz.asciidoc index f44742c648e8e..735ca5b4ea0d1 100644 --- a/docs/reference/setup/install/zip-targz.asciidoc +++ b/docs/reference/setup/install/zip-targz.asciidoc @@ -90,22 +90,7 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] [[zip-targz-running]] -==== Running Elasticsearch from the command line - -Elasticsearch can be started from the command line as follows: - -[source,sh] --------------------------------------------- -./bin/elasticsearch --------------------------------------------- - -By default, Elasticsearch runs in the foreground, prints its logs to the -standard output (`stdout`), and can be stopped by pressing `Ctrl-C`. - -NOTE: All scripts packaged with Elasticsearch require a version of Bash -that supports arrays and assume that Bash is available at `/bin/bash`. -As such, Bash should be available at this path either directly or via a -symbolic link. +include::zip-targz-start.asciidoc[] include::check-running.asciidoc[] @@ -113,27 +98,7 @@ Log printing to `stdout` can be disabled using the `-q` or `--quiet` option on the command line. 
[[setup-installation-daemon]] -==== Running as a daemon - -To run Elasticsearch as a daemon, specify `-d` on the command line, and record -the process ID in a file using the `-p` option: - -[source,sh] --------------------------------------------- -./bin/elasticsearch -d -p pid --------------------------------------------- - -Log messages can be found in the `$ES_HOME/logs/` directory. - -To shut down Elasticsearch, kill the process ID recorded in the `pid` file: - -[source,sh] --------------------------------------------- -kill `cat pid` --------------------------------------------- - -NOTE: The startup scripts provided in the <> and <> -packages take care of starting and stopping the Elasticsearch process for you. +include::zip-targz-daemon.asciidoc[] [[zip-targz-configuring]] ==== Configuring Elasticsearch on the command line diff --git a/docs/reference/setup/install/zip-windows-start.asciidoc b/docs/reference/setup/install/zip-windows-start.asciidoc new file mode 100644 index 0000000000000..7ecea449d2895 --- /dev/null +++ b/docs/reference/setup/install/zip-windows-start.asciidoc @@ -0,0 +1,11 @@ +==== Running Elasticsearch from the command line + +Elasticsearch can be started from the command line as follows: + +[source,sh] +-------------------------------------------- +.\bin\elasticsearch.bat +-------------------------------------------- + +By default, Elasticsearch runs in the foreground, prints its logs to `STDOUT`, +and can be stopped by pressing `Ctrl-C`. diff --git a/docs/reference/setup/install/zip-windows.asciidoc b/docs/reference/setup/install/zip-windows.asciidoc index cd86a6268911c..254fb63f6157d 100644 --- a/docs/reference/setup/install/zip-windows.asciidoc +++ b/docs/reference/setup/install/zip-windows.asciidoc @@ -58,17 +58,7 @@ include::xpack-indices.asciidoc[] endif::include-xpack[] [[windows-running]] -==== Running Elasticsearch from the command line - -Elasticsearch can be started from the command line as follows: - -[source,sh] --------------------------------------------- -.\bin\elasticsearch.bat --------------------------------------------- - -By default, Elasticsearch runs in the foreground, prints its logs to `STDOUT`, -and can be stopped by pressing `Ctrl-C`. +include::zip-windows-start.asciidoc[] [[windows-configuring]] ==== Configuring Elasticsearch on the command line diff --git a/docs/reference/setup/starting.asciidoc b/docs/reference/setup/starting.asciidoc new file mode 100644 index 0000000000000..6fab871e7c9ca --- /dev/null +++ b/docs/reference/setup/starting.asciidoc @@ -0,0 +1,72 @@ +[[starting-elasticsearch]] +== Starting Elasticsearch + +The method for starting {es} varies depending on how you installed it. + +[float] +[[start-targz]] +=== Archive packages (`.tar.gz`) + +If you installed {es} with a `.tar.gz` package, you can start {es} from the +command line. + +[float] +include::install/zip-targz-start.asciidoc[] + +[float] +include::install/zip-targz-daemon.asciidoc[] + +[float] +[[start-zip]] +=== Archive packages (`.zip`) + +If you installed {es} on Windows with a `.zip` package, you can start {es} from +the command line. If you want {es} to start automatically at boot time without +any user interaction, <>. 
+ +[float] +include::install/zip-windows-start.asciidoc[] + +[float] +[[start-deb]] +=== Debian packages + +include::install/init-systemd.asciidoc[] + +[float] +include::install/deb-init.asciidoc[] + +[float] +include::install/systemd.asciidoc[] + +[float] +[[start-docker]] +=== Docker images + +If you installed a Docker image, you can start {es} from the command line. There +are different methods depending on whether you're using development mode or +production mode. See <>. + +[float] +[[start-msi]] +=== MSI packages + +If you installed {es} on Windows using the `.msi` package, you can start {es} +from the command line. If you want it to start automatically at boot time +without any user interaction, +<>. + +[float] +include::install/msi-windows-start.asciidoc[] + +[float] +[[start-rpm]] +=== RPM packages + +include::install/init-systemd.asciidoc[] + +[float] +include::install/rpm-init.asciidoc[] + +[float] +include::install/systemd.asciidoc[] \ No newline at end of file diff --git a/docs/reference/sql/language/syntax/describe-table.asciidoc b/docs/reference/sql/language/syntax/describe-table.asciidoc index dd2d27a5781d2..396be25bb5170 100644 --- a/docs/reference/sql/language/syntax/describe-table.asciidoc +++ b/docs/reference/sql/language/syntax/describe-table.asciidoc @@ -20,3 +20,8 @@ DESC table .Description `DESC` and `DESCRIBE` are aliases to <>. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[describeTable] +---- diff --git a/docs/reference/sql/language/syntax/select.asciidoc b/docs/reference/sql/language/syntax/select.asciidoc index 4a7c0534b68a3..b58173097b0ae 100644 --- a/docs/reference/sql/language/syntax/select.asciidoc +++ b/docs/reference/sql/language/syntax/select.asciidoc @@ -36,23 +36,26 @@ The general execution of `SELECT` is as follows: As with a table, every output column of a `SELECT` has a name which can be either specified per column through the `AS` keyword : -[source,sql] +["source","sql",subs="attributes,callouts,macros"] ---- -SELECT column AS c +include-tagged::{sql-specs}/docs.csv-spec[selectColumnAlias] ---- +Note: `AS` is an optional keyword; however, it helps with the readability and, in some cases, the disambiguation of the query, +which is why it is recommended to specify it. + assigned by {es-sql} if no name is given: -[source,sql] +["source","sql",subs="attributes,callouts,macros"] ---- -SELECT 1 + 1 +include-tagged::{sql-specs}/docs.csv-spec[selectInline] ---- or if it's a simple column reference, use its name as the column name: -[source,sql] +["source","sql",subs="attributes,callouts,macros"] ---- -SELECT col FROM table +include-tagged::{sql-specs}/docs.csv-spec[selectColumn] ---- [[sql-syntax-select-wildcard]] [float] @@ -61,11 +64,11 @@ SELECT col FROM table To select all the columns in the source, one can use `*`: ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/select.sql-spec[wildcardWithOrder] -------------------------------------------------- +---- +include-tagged::{sql-specs}/docs.csv-spec[wildcardWithOrder] +---- -which essentially returns all columsn found. +which essentially returns all top-level columns found (sub-fields, such as multi-fields, are ignored). [[sql-syntax-from]] [float] @@ -83,17 +86,30 @@ where: `table_name`:: Represents the name (optionally qualified) of an existing table, either a concrete or base one (actual index) or alias.
+ + If the table name contains special SQL characters (such as `.`, `-`, etc.) use double quotes to escape them: -[source, sql] + +["source","sql",subs="attributes,callouts,macros"] ---- -SELECT ... FROM "some-table" +include-tagged::{sql-specs}/docs.csv-spec[fromTableQuoted] ---- The name can be a <> pointing to multiple indices (likely requiring quoting as mentioned above) with the restriction that *all* resolved concrete tables have **exact mapping**. +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[fromTablePatternQuoted] +---- + `alias`:: A substitute name for the `FROM` item containing the alias. An alias is used for brevity or to eliminate ambiguity. When an alias is provided, it completely hides the actual name of the table and must be used in its place. +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[fromTableAlias] +---- + [[sql-syntax-where]] [float] ==== WHERE Clause @@ -111,6 +127,11 @@ where: Represents an expression that evaluates to a `boolean`. Only the rows that match the condition (to `true`) are returned. +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[basicWhere] +---- + [[sql-syntax-group-by]] [float] ==== GROUP BY @@ -126,9 +147,79 @@ where: `grouping_element`:: -Represents an expression on which rows are being grouped _on_. It can be a column name, name or ordinal number of a column or an arbitrary expression of column values. +Represents an expression on which rows are being grouped. It can be a column name, an alias, the ordinal number of a column, or an arbitrary expression of column values. + +A common case, grouping by column name: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByColumn] +---- + +Grouping by output ordinal: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByOrdinal] +---- + +Grouping by alias: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByAlias] +---- + +And grouping by a column expression (typically used alongside an alias): + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByExpression] +---- + +When a `GROUP BY` clause is used in a `SELECT`, _all_ output expressions must be either aggregate functions or expressions used for grouping or derivatives thereof (otherwise there would be more than one possible value to return for each ungrouped column). + +For example: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByAndAgg] +---- + +Expressions over aggregates used in output: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByAndAggExpression] +---- + +Multiple aggregates used: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByAndMultipleAggs] +---- + +[[sql-syntax-group-by-implicit]] +[float] +===== Implicit Grouping + +When an aggregation is used without an associated `GROUP BY`, an __implicit grouping__ is applied, meaning all selected rows are considered to form a single default, or implicit group. +As such, the query emits only a single row (as there is only a single group).
+ +A common example is counting the number of records: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByImplicitCount] +---- + +Of course, multiple aggregations can be applied: -When a `GROUP BY` clause is used in a `SELECT`, _all_ output expressions must be either aggregate functions or expressions used for grouping or derivates of (otherwise there would be more than one possible value to return for each ungrouped column). +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByImplicitMultipleAggs] +---- [[sql-syntax-having]] [float] @@ -147,13 +238,44 @@ where: Represents an expression that evaluates to a `boolean`. Only groups that match the condition (to `true`) are returned. -Both `WHERE` and `HAVING` are used for filtering however there are several differences between them: +Both `WHERE` and `HAVING` are used for filtering; however, there are several significant differences between them: . `WHERE` works on individual *rows*, `HAVING` works on the *groups* created by ``GROUP BY`` . `WHERE` is evaluated *before* grouping, `HAVING` is evaluated *after* grouping -Note that it is possible to have a `HAVING` clause without a ``GROUP BY``. In this case, an __implicit grouping__ is applied, meaning all selected rows are considered to form a single group and `HAVING` can be applied on any of the aggregate functions specified on this group. ` -As such a query emits only a single row (as there is only a single group), `HAVING` condition returns either one row (the group) or zero if the condition fails. +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByHaving] +---- + +Furthermore, one can use multiple aggregate expressions inside `HAVING`, even ones that are not used in the output (`SELECT`): + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByHavingMultiple] +---- + +[[sql-syntax-having-group-by-implicit]] +[float] +===== Implicit Grouping + +As indicated above, it is possible to have a `HAVING` clause without a ``GROUP BY``. In this case, the so-called <> is applied, meaning all selected rows are considered to form a single group and `HAVING` can be applied on any of the aggregate functions specified on this group. +As such, the query emits only a single row (as there is only a single group) and the `HAVING` condition returns either one row (the group) or zero rows if the condition fails.
+ +In this example, `HAVING` matches: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[groupByHavingImplicitMatch] +---- + +//However `HAVING` can also not match, in which case an empty result is returned: +// +//["source","sql",subs="attributes,callouts,macros"] +//---- +//include-tagged::{sql-specs}/docs.csv-spec[groupByHavingImplicitNoMatch] +//---- + [[sql-syntax-order-by]] [float] @@ -178,30 +300,10 @@ IMPORTANT: When used along-side, `GROUP BY` expression can point _only_ to the c For example, the following query sorts by an arbitrary input field (`page_count`): -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT * FROM library ORDER BY page_count DESC LIMIT 5" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -which results in something like: - -[source,text] --------------------------------------------------- - author | name | page_count | release_date ------------------+--------------------+---------------+------------------------ -Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z -Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z -James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] -// TESTRESPONSE[_cat] +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[orderByBasic] +---- [[sql-syntax-order-by-score]] ==== Order By Score @@ -215,58 +317,22 @@ combined using the same rules as {es}'s To sort based on the `score`, use the special function `SCORE()`: -[source,js] --------------------------------------------------- -POST /_xpack/sql?format=txt -{ - "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC" -} --------------------------------------------------- -// CONSOLE -// TEST[setup:library] - -Which results in something like: - -[source,text] --------------------------------------------------- - SCORE() | author | name | page_count | release_date ----------------+---------------+-------------------+---------------+------------------------ -2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z -1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z -1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z --------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] -// TESTRESPONSE[_cat] - -Note that you can return `SCORE()` by adding it to the where clause. 
This
-is possible even if you are not sorting by `SCORE()`:
-
-[source,js]
--------------------------------------------------
-POST /_xpack/sql?format=txt
-{
-    "query": "SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC"
-}
--------------------------------------------------
-// CONSOLE
-// TEST[setup:library]
-
-[source,text]
--------------------------------------------------
-    SCORE()    |    author     |       name        |  page_count   |      release_date
----------------+---------------+-------------------+---------------+------------------------
-2.288635       |Frank Herbert  |Dune               |604            |1965-06-01T00:00:00.000Z
-1.4005898      |Frank Herbert  |God Emperor of Dune|454            |1981-05-28T00:00:00.000Z
-1.6086555      |Frank Herbert  |Children of Dune   |408            |1976-04-21T00:00:00.000Z
-1.8893257      |Frank Herbert  |Dune Messiah       |331            |1969-10-15T00:00:00.000Z
--------------------------------------------------
-// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/]
-// TESTRESPONSE[_cat]
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[orderByScore]
+----
+
+Note that you can return `SCORE()` by using a full-text search predicate in the `WHERE` clause.
+This is possible even if `SCORE()` is not used for sorting:
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[orderByScoreWithMatch]
+----

NOTE:
-Trying to return `score` from a non full-text queries will return the same value for all results, as
-all are equilley relevant.
+Trying to return `score` from a non-full-text query will return the same value for all results, as
+all are equally relevant.

[[sql-syntax-limit]]
[float]
@@ -284,3 +350,10 @@ where

count:: is a positive integer or zero indicating the maximum *possible* number of results being returned (as there might be fewer matches than the limit). If `0` is specified, no results are returned.

ALL:: indicates there is no limit and thus all results are being returned.
+
+To return only a subset of the results:
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[limitBasic]
+----
\ No newline at end of file
diff --git a/docs/reference/sql/language/syntax/show-columns.asciidoc b/docs/reference/sql/language/syntax/show-columns.asciidoc
index a52c744f17a97..539c35c57952a 100644
--- a/docs/reference/sql/language/syntax/show-columns.asciidoc
+++ b/docs/reference/sql/language/syntax/show-columns.asciidoc
@@ -12,3 +12,8 @@ SHOW COLUMNS [ FROM | IN ] ? table

.Description

List the columns in table and their data type (and other attributes).
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showColumns]
+----
diff --git a/docs/reference/sql/language/syntax/show-functions.asciidoc b/docs/reference/sql/language/syntax/show-functions.asciidoc
index 964cdf39081c6..1e4220ef5295c 100644
--- a/docs/reference/sql/language/syntax/show-functions.asciidoc
+++ b/docs/reference/sql/language/syntax/show-functions.asciidoc
@@ -14,3 +14,34 @@ SHOW FUNCTIONS [ LIKE? pattern<1>? ]?

.Description

List all the SQL functions and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern.
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showFunctions]
+----
+
+The list of functions returned can be customized based on the pattern.
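For reference, the `LIKE` pattern language is small: `_` matches exactly one character and `%` matches zero or more. As a rough sketch of these semantics (a hypothetical helper for illustration only, not code from this change), a pattern can be translated into a regular expression:

["source","java"]
----
import java.util.regex.Pattern;

class LikePatterns {
    // Translate a SQL LIKE pattern into an equivalent regex:
    // '_' becomes '.', '%' becomes '.*', everything else is matched literally.
    static boolean like(String value, String pattern) {
        StringBuilder regex = new StringBuilder();
        for (char c : pattern.toCharArray()) {
            if (c == '_') {
                regex.append('.');
            } else if (c == '%') {
                regex.append(".*");
            } else {
                regex.append(Pattern.quote(String.valueOf(c)));
            }
        }
        return Pattern.matches(regex.toString(), value);
    }
}
----

Under these rules, `like("ABS", "A__")` is true, while `like("ABS", "A_")` is not.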
+
+It can be an exact match:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showFunctionsLikeExact]
+----
+
+A wildcard for exactly one character:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showFunctionsLikeChar]
+----
+
+A wildcard matching zero or more characters:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showFunctionsLikeWildcard]
+----
+
+Or, of course, a variation of the above:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showFunctionsWithPattern]
+----
diff --git a/docs/reference/sql/language/syntax/show-tables.asciidoc b/docs/reference/sql/language/syntax/show-tables.asciidoc
index 7772c39c6fc21..b401e9f7d900a 100644
--- a/docs/reference/sql/language/syntax/show-tables.asciidoc
+++ b/docs/reference/sql/language/syntax/show-tables.asciidoc
@@ -13,4 +13,36 @@ SHOW TABLES [ LIKE? pattern<1>? ]?

.Description

-List the tables available to the current user and their type. The `LIKE` clause can be used to restrict the list of names to the given pattern.
+List the tables available to the current user and their type.
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showTables]
+----
+
+The `LIKE` clause can be used to restrict the list of names to the given pattern.
+
+The pattern can be an exact match:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showTablesLikeExact]
+----
+
+A wildcard matching multiple characters:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showTablesLikeWildcard]
+----
+
+A wildcard matching a single character:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showTablesLikeOneChar]
+----
+
+
+Or a mixture of single- and multiple-character wildcards:
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[showTablesLikeMixed]
+----
diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java
index 02388d838bc2a..6c68710c6d8bd 100644
--- a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java
+++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java
@@ -52,7 +52,7 @@ public final class Grok {
             "%\\{" +
             "(?<name>" +
                 "(?<pattern>[A-z0-9]+)" +
-                    "(?::(?<subname>[A-z0-9_:.-]+))?" +
+                    "(?::(?<subname>[[:alnum:]@\\[\\]_:.-]+))?" +
             ")" +
             "(?:=(?<definition>"
+                "(?:" +
@@ -81,11 +81,11 @@ public final class Grok {
     public Grok(Map<String, String> patternBank, String grokPattern) {
         this(patternBank, grokPattern, true, ThreadWatchdog.noop());
     }
-    
+
     public Grok(Map<String, String> patternBank, String grokPattern, ThreadWatchdog threadWatchdog) {
         this(patternBank, grokPattern, true, threadWatchdog);
     }
-    
+
     Grok(Map<String, String> patternBank, String grokPattern, boolean namedCaptures) {
         this(patternBank, grokPattern, namedCaptures, ThreadWatchdog.noop());
     }
diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java
index 983c84cf76b4c..7c1b473c6b3a1 100644
--- a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java
+++ b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java
@@ -379,10 +379,10 @@ public void testMultipleNamedCapturesWithSameName() {
         expected.put("num", "1");
         assertThat(grok.captures("12"), equalTo(expected));
     }
-    
+
     public void testExponentialExpressions() {
         AtomicBoolean run = new AtomicBoolean(true); // to avoid a lingering thread when test has completed
-    
+
         String grokPattern = "Bonsuche mit folgender Anfrage: Belegart->\\[%{WORD:param2},(?(\\s*%{NOTSPACE})*)\\] " +
             "Zustand->ABGESCHLOSSEN Kassennummer->%{WORD:param9} Bonnummer->%{WORD:param10} Datum->%{DATESTAMP_OTHER:param11}";
         String logLine = "Bonsuche mit folgender Anfrage: Belegart->[EINGESCHRAENKTER_VERKAUF, VERKAUF, NACHERFASSUNG] " +
@@ -406,4 +406,50 @@ public void testExponentialExpressions() {
         run.set(false);
         assertThat(e.getMessage(), equalTo("grok pattern matching was interrupted after [200] ms"));
     }
+
+    public void testAtInFieldName() {
+        assertGrokedField("@metadata");
+    }
+
+    public void testNonAsciiLetterInFieldName() {
+        assertGrokedField("metädata");
+    }
+
+    public void testSquareBracketInFieldName() {
+        assertGrokedField("metadat[a]");
+        assertGrokedField("metad[a]ta");
+        assertGrokedField("[m]etadata");
+    }
+
+    public void testUnderscoreInFieldName() {
+        assertGrokedField("meta_data");
+    }
+
+    public void testDotInFieldName() {
+        assertGrokedField("meta.data");
+    }
+
+    public void testMinusInFieldName() {
+        assertGrokedField("meta-data");
+    }
+
+    public void testAlphanumericFieldName() {
+        assertGrokedField(randomAlphaOfLengthBetween(1, 5));
+        assertGrokedField(randomAlphaOfLengthBetween(1, 5) + randomIntBetween(0, 100));
+        assertGrokedField(randomIntBetween(0, 100) + randomAlphaOfLengthBetween(1, 5));
+        assertGrokedField(String.valueOf(randomIntBetween(0, 100)));
+    }
+
+    public void testUnsupportedBracketsInFieldName() {
+        Grok grok = new Grok(basePatterns, "%{WORD:unsuppo(r)ted}");
+        Map<String, Object> matches = grok.captures("line");
+        assertNull(matches);
+    }
+
+    private void assertGrokedField(String fieldName) {
+        String line = "foo";
+        Grok grok = new Grok(basePatterns, "%{WORD:" + fieldName + "}");
+        Map<String, Object> matches = grok.captures(line);
+        assertEquals(line, matches.get(fieldName));
+    }
 }
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java
index 9c163290757bd..23c98ca1e0c0e 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java
@@ -27,10 +27,12 @@
 import java.util.Map;

 /**
- * Base class for processors that manipulate strings and require a single "fields" array config value, which
+ * Base class for processors 
that manipulate source strings and require a single "fields" array config value, which
  * holds a list of field names in string format.
+ *
+ * @param <T> The resultant type for the target field
  */
-abstract class AbstractStringProcessor extends AbstractProcessor {
+abstract class AbstractStringProcessor<T> extends AbstractProcessor {
     private final String field;
     private final boolean ignoreMissing;
     private final String targetField;
@@ -67,7 +69,7 @@ public final void execute(IngestDocument document) {
         document.setFieldValue(targetField, process(val));
     }

-    protected abstract String process(String value);
+    protected abstract T process(String value);

     abstract static class Factory implements Processor.Factory {
         final String processorType;
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java
new file mode 100644
index 0000000000000..dfe9a054acf07
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.common.unit.ByteSizeValue;
+
+import java.util.Map;
+
+/**
+ * Processor that converts the content of string fields to the byte value. 
+ * Throws an exception if the field is not of type string or cannot be converted to a numeric byte value.
+ */
+public final class BytesProcessor extends AbstractStringProcessor<Long> {
+
+    public static final String TYPE = "bytes";
+
+    BytesProcessor(String processorTag, String field, boolean ignoreMissing, String targetField) {
+        super(processorTag, field, ignoreMissing, targetField);
+    }
+
+    @Override
+    protected Long process(String value) {
+        return ByteSizeValue.parseBytesSizeValue(value, null, getField()).getBytes();
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    public static final class Factory extends AbstractStringProcessor.Factory {
+
+        public Factory() {
+            super(TYPE);
+        }
+
+        @Override
+        protected BytesProcessor newProcessor(String tag, Map<String, Object> config, String field,
+                                              boolean ignoreMissing, String targetField) {
+            return new BytesProcessor(tag, field, ignoreMissing, targetField);
+        }
+    }
+}
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
index 591060098b728..bc900d325104a 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
@@ -81,6 +81,7 @@ public Map getProcessors(Processor.Parameters paramet
         processors.put(JsonProcessor.TYPE, new JsonProcessor.Factory());
         processors.put(KeyValueProcessor.TYPE, new KeyValueProcessor.Factory());
         processors.put(URLDecodeProcessor.TYPE, new URLDecodeProcessor.Factory());
+        processors.put(BytesProcessor.TYPE, new BytesProcessor.Factory());
         return Collections.unmodifiableMap(processors);
     }
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java
index 9d37f27bb33e5..4e4182bfdc891 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AbstractStringProcessorTestCase.java
@@ -31,7 +31,7 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;

-public abstract class AbstractStringProcessorTestCase extends ESTestCase {
+public abstract class AbstractStringProcessorTestCase<T> extends ESTestCase {

     protected abstract AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField);
@@ -39,7 +39,11 @@ protected String modifyInput(String input) {
         return input;
     }

-    protected abstract String expectedResult(String input);
+    protected abstract T expectedResult(String input);
+
+    protected Class<T> expectedResultType() {
+        return (Class<T>) String.class; // most result types are Strings
+    }

     public void testProcessor() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
@@ -47,7 +51,7 @@ public void testProcessor() throws Exception {
         String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, modifyInput(fieldValue));
         Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
         processor.execute(ingestDocument);
-        assertThat(ingestDocument.getFieldValue(fieldName, String.class), equalTo(expectedResult(fieldValue)));
+        assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), 
equalTo(expectedResult(fieldValue))); } public void testFieldNotFound() throws Exception { @@ -109,6 +113,6 @@ public void testTargetField() throws Exception { String targetFieldName = fieldName + "foo"; Processor processor = newProcessor(fieldName, randomBoolean(), targetFieldName); processor.execute(ingestDocument); - assertThat(ingestDocument.getFieldValue(targetFieldName, String.class), equalTo(expectedResult(fieldValue))); + assertThat(ingestDocument.getFieldValue(targetFieldName, expectedResultType()), equalTo(expectedResult(fieldValue))); } } diff --git a/plugins/repository-s3/qa/amazon-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3RepositoryClientYamlTestSuiteIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorFactoryTests.java similarity index 53% rename from plugins/repository-s3/qa/amazon-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3RepositoryClientYamlTestSuiteIT.java rename to modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorFactoryTests.java index afcc0fa353482..10050240310e4 100644 --- a/plugins/repository-s3/qa/amazon-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3RepositoryClientYamlTestSuiteIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorFactoryTests.java @@ -17,21 +17,11 @@ * under the License. */ -package org.elasticsearch.repositories.s3; +package org.elasticsearch.ingest.common; -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -public class AmazonS3RepositoryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - - public AmazonS3RepositoryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); +public class BytesProcessorFactoryTests extends AbstractStringProcessorFactoryTestCase { + @Override + protected AbstractStringProcessor.Factory newFactory() { + return new BytesProcessor.Factory(); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java new file mode 100644 index 0000000000000..0da3434adf178 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.ingest.RandomDocumentPicks;
+import org.hamcrest.CoreMatchers;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class BytesProcessorTests extends AbstractStringProcessorTestCase<Long> {
+
+    private String modifiedInput;
+
+    @Override
+    protected AbstractStringProcessor newProcessor(String field, boolean ignoreMissing, String targetField) {
+        return new BytesProcessor(randomAlphaOfLength(10), field, ignoreMissing, targetField);
+    }
+
+    @Override
+    protected String modifyInput(String input) {
+        // largest value that allows all results < Long.MAX_VALUE bytes
+        long randomNumber = randomLongBetween(1, Long.MAX_VALUE / ByteSizeUnit.PB.toBytes(1));
+        ByteSizeUnit randomUnit = randomFrom(ByteSizeUnit.values());
+        modifiedInput = randomNumber + randomUnit.getSuffix();
+        return modifiedInput;
+    }
+
+    @Override
+    protected Long expectedResult(String input) {
+        return ByteSizeValue.parseBytesSizeValue(modifiedInput, null, "").getBytes();
+    }
+
+    @Override
+    protected Class<Long> expectedResultType() {
+        return Long.class;
+    }
+
+    public void testTooLarge() {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "8912pb");
+        Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
+        ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument));
+        assertThat(exception.getMessage(),
+            CoreMatchers.equalTo("failed to parse setting [" + fieldName + "] with value [8912pb] as a size in bytes"));
+        assertThat(exception.getCause().getMessage(),
+            CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported"));
+    }
+
+    public void testNotBytes() {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "junk");
+        Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
+        ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument));
+        assertThat(exception.getMessage(),
+            CoreMatchers.equalTo("failed to parse [junk]"));
+    }
+
+    public void testMissingUnits() {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "1");
+        Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
+        ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument));
+        assertThat(exception.getMessage(),
+            CoreMatchers.containsString("unit is missing or unrecognized"));
+    }
+
+    public void testFractional() throws Exception {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "1.1kb");
+        Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
+        processor.execute(ingestDocument);
+        assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L));
+        assertWarnings("Fractional bytes 
values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + + "[" + fieldName + "]"); + } +} diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml index a58c329a7c525..10eb58e97def8 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml @@ -10,23 +10,25 @@ - match: { nodes.$master.modules.0.name: ingest-common } - match: { nodes.$master.ingest.processors.0.type: append } - - match: { nodes.$master.ingest.processors.1.type: convert } - - match: { nodes.$master.ingest.processors.2.type: date } - - match: { nodes.$master.ingest.processors.3.type: date_index_name } - - match: { nodes.$master.ingest.processors.4.type: dot_expander } - - match: { nodes.$master.ingest.processors.5.type: fail } - - match: { nodes.$master.ingest.processors.6.type: foreach } - - match: { nodes.$master.ingest.processors.7.type: grok } - - match: { nodes.$master.ingest.processors.8.type: gsub } - - match: { nodes.$master.ingest.processors.9.type: join } - - match: { nodes.$master.ingest.processors.10.type: json } - - match: { nodes.$master.ingest.processors.11.type: kv } - - match: { nodes.$master.ingest.processors.12.type: lowercase } - - match: { nodes.$master.ingest.processors.13.type: remove } - - match: { nodes.$master.ingest.processors.14.type: rename } - - match: { nodes.$master.ingest.processors.15.type: script } - - match: { nodes.$master.ingest.processors.16.type: set } - - match: { nodes.$master.ingest.processors.17.type: sort } - - match: { nodes.$master.ingest.processors.18.type: split } - - match: { nodes.$master.ingest.processors.19.type: trim } - - match: { nodes.$master.ingest.processors.20.type: uppercase } + - match: { nodes.$master.ingest.processors.1.type: bytes } + - match: { nodes.$master.ingest.processors.2.type: convert } + - match: { nodes.$master.ingest.processors.3.type: date } + - match: { nodes.$master.ingest.processors.4.type: date_index_name } + - match: { nodes.$master.ingest.processors.5.type: dot_expander } + - match: { nodes.$master.ingest.processors.6.type: fail } + - match: { nodes.$master.ingest.processors.7.type: foreach } + - match: { nodes.$master.ingest.processors.8.type: grok } + - match: { nodes.$master.ingest.processors.9.type: gsub } + - match: { nodes.$master.ingest.processors.10.type: join } + - match: { nodes.$master.ingest.processors.11.type: json } + - match: { nodes.$master.ingest.processors.12.type: kv } + - match: { nodes.$master.ingest.processors.13.type: lowercase } + - match: { nodes.$master.ingest.processors.14.type: remove } + - match: { nodes.$master.ingest.processors.15.type: rename } + - match: { nodes.$master.ingest.processors.16.type: script } + - match: { nodes.$master.ingest.processors.17.type: set } + - match: { nodes.$master.ingest.processors.18.type: sort } + - match: { nodes.$master.ingest.processors.19.type: split } + - match: { nodes.$master.ingest.processors.20.type: trim } + - match: { nodes.$master.ingest.processors.21.type: uppercase } + diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/180_bytes_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/180_bytes_processor.yml new file mode 100644 index 0000000000000..bc48720966c5f --- /dev/null +++ 
b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/180_bytes_processor.yml @@ -0,0 +1,42 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test bytes processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "bytes" : { + "field" : "bytes_source_field", + "target_field" : "bytes_target_field" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {bytes_source_field: "1kb"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } + diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index 6ab6a86113595..fe58984fa8712 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -19,14 +19,14 @@ lexer grammar PainlessLexer; -@members{ +@members { /** * Check against the current whitelist to determine whether a token is a type * or not. Called by the {@code TYPE} token defined in {@code PainlessLexer.g4}. * See also * The lexer hack. */ -protected abstract boolean isSimpleType(String name); +protected abstract boolean isType(String name); /** * Is the preceding {@code /} a the beginning of a regex (true) or a division @@ -133,7 +133,7 @@ NULL: 'null'; // or not. Note this works by processing one character at a time // and the rule is added or removed as this happens. This is also known // as "the lexer hack." See (https://en.wikipedia.org/wiki/The_lexer_hack). -TYPE: ID ( DOT ID )* { isSimpleType(getText()) }?; +TYPE: ID ( DOT ID )* { isType(getText()) }?; ID: [_a-zA-Z] [_a-zA-Z0-9]*; mode AFTER_DOT; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 988a31a24ee27..8694ff7903859 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -185,7 +185,7 @@ static Method lookupMethodInternal(Definition definition, Class receiverClass Definition.MethodKey key = new Definition.MethodKey(name, arity); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - Struct struct = definition.RuntimeClassToStruct(clazz); + Struct struct = definition.getPainlessStructFromJavaClass(clazz); if (struct != null) { Method method = struct.methods.get(key); @@ -195,7 +195,7 @@ static Method lookupMethodInternal(Definition definition, Class receiverClass } for (Class iface : clazz.getInterfaces()) { - struct = definition.RuntimeClassToStruct(iface); + struct = definition.getPainlessStructFromJavaClass(iface); if (struct != null) { Method method = struct.methods.get(key); @@ -279,7 +279,7 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp captures[capture] = callSiteType.parameterType(i + 1 + capture); } MethodHandle filter; - Definition.Type interfaceType = definition.ClassToType(method.arguments.get(i - 1 - replaced)); + Class interfaceType = method.arguments.get(i - 1 - replaced); if (signature.charAt(0) == 'S') { // the implementation is strongly typed, now that we know the interface type, // we have everything. 
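(Aside: the hunks above and below rework the method-reference plumbing to pass plain `Class` objects instead of `Definition.Type` wrappers. For orientation, the JDK machinery these lookups ultimately drive can be sketched as follows; this is a standalone illustration, not code from this change.)

["source","java"]
----
import java.lang.invoke.CallSite;
import java.lang.invoke.LambdaMetafactory;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.util.function.Function;

class MethodRefSketch {
    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        // Bind String::length to the functional interface Function<String, Integer>.
        CallSite site = LambdaMetafactory.metafactory(
                lookup,
                "apply",                                           // interface method to implement
                MethodType.methodType(Function.class),             // call site returns the interface
                MethodType.methodType(Object.class, Object.class), // erased interface signature
                lookup.findVirtual(String.class, "length",
                        MethodType.methodType(int.class)),         // the actual implementation
                MethodType.methodType(Integer.class, String.class) // instantiated signature
        );
        @SuppressWarnings("unchecked")
        Function<String, Integer> ref = (Function<String, Integer>) site.getTarget().invokeExact();
        System.out.println(ref.apply("painless")); // prints 8
    }
}
----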
@@ -293,14 +293,14 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp // the interface type is now known, but we need to get the implementation. // this is dynamically based on the receiver type (and cached separately, underneath // this cache). It won't blow up since we never nest here (just references) - MethodType nestedType = MethodType.methodType(interfaceType.clazz, captures); + MethodType nestedType = MethodType.methodType(interfaceType, captures); CallSite nested = DefBootstrap.bootstrap(definition, lookup, call, nestedType, 0, DefBootstrap.REFERENCE, - interfaceType.name); + Definition.ClassToName(interfaceType)); filter = nested.dynamicInvoker(); } else { throw new AssertionError(); @@ -324,8 +324,8 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp */ static MethodHandle lookupReference(Definition definition, Lookup lookup, String interfaceClass, Class receiverClass, String name) throws Throwable { - Definition.Type interfaceType = definition.getType(interfaceClass); - Method interfaceMethod = interfaceType.struct.functionalMethod; + Class interfaceType = definition.getJavaClassFromPainlessType(interfaceClass); + Method interfaceMethod = definition.getPainlessStructFromJavaClass(interfaceType).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); } @@ -337,15 +337,15 @@ static MethodHandle lookupReference(Definition definition, Lookup lookup, String /** Returns a method handle to an implementation of clazz, given method reference signature. */ private static MethodHandle lookupReferenceInternal(Definition definition, Lookup lookup, - Definition.Type clazz, String type, String call, Class... captures) + Class clazz, String type, String call, Class... captures) throws Throwable { final FunctionRef ref; if ("this".equals(type)) { // user written method - Method interfaceMethod = clazz.struct.functionalMethod; + Method interfaceMethod = definition.getPainlessStructFromJavaClass(clazz).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + clazz.name + "], not a functional interface"); + "to [" + Definition.ClassToName(clazz) + "], not a functional interface"); } int arity = interfaceMethod.arguments.size() + captures.length; final MethodHandle handle; @@ -359,14 +359,14 @@ private static MethodHandle lookupReferenceInternal(Definition definition, Looku // because the arity does not match the expected interface type. if (call.contains("$")) { throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + - "] in [" + clazz.clazz + "]"); + "] in [" + clazz + "]"); } throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments."); } - ref = new FunctionRef(clazz.clazz, interfaceMethod, call, handle.type(), captures.length); + ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length); } else { // whitelist lookup - ref = new FunctionRef(definition, clazz.clazz, type, call, captures.length); + ref = new FunctionRef(definition, clazz, type, call, captures.length); } final CallSite callSite = LambdaBootstrap.lambdaBootstrap( lookup, @@ -379,7 +379,7 @@ private static MethodHandle lookupReferenceInternal(Definition definition, Looku ref.delegateMethodType, ref.isDelegateInterface ? 
1 : 0 ); - return callSite.dynamicInvoker().asType(MethodType.methodType(clazz.clazz, captures)); + return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, captures)); } /** gets the field name used to lookup up the MethodHandle for a function. */ @@ -416,7 +416,7 @@ public static String getUserFunctionHandleFieldName(String name, int arity) { static MethodHandle lookupGetter(Definition definition, Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - Struct struct = definition.RuntimeClassToStruct(clazz); + Struct struct = definition.getPainlessStructFromJavaClass(clazz); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -426,7 +426,7 @@ static MethodHandle lookupGetter(Definition definition, Class receiverClass, } for (final Class iface : clazz.getInterfaces()) { - struct = definition.RuntimeClassToStruct(iface); + struct = definition.getPainlessStructFromJavaClass(iface); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -487,7 +487,7 @@ static MethodHandle lookupGetter(Definition definition, Class receiverClass, static MethodHandle lookupSetter(Definition definition, Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - Struct struct = definition.RuntimeClassToStruct(clazz); + Struct struct = definition.getPainlessStructFromJavaClass(clazz); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -497,7 +497,7 @@ static MethodHandle lookupSetter(Definition definition, Class receiverClass, } for (final Class iface : clazz.getInterfaces()) { - struct = definition.RuntimeClassToStruct(iface); + struct = definition.getPainlessStructFromJavaClass(iface); if (struct != null) { MethodHandle handle = struct.setters.get(name); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 31fba8f757954..9c7c7f631b68d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -1,7 +1,3 @@ -package org.elasticsearch.painless; - -import org.elasticsearch.common.SuppressForbidden; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -21,6 +17,10 @@ * under the License. */ +package org.elasticsearch.painless; + +import org.elasticsearch.common.SuppressForbidden; + import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; @@ -72,16 +72,16 @@ private DefBootstrap() {} // no instance! public static final int SHIFT_OPERATOR = 9; /** static bootstrap parameter indicating a request to normalize an index for array-like-access */ public static final int INDEX_NORMALIZE = 10; - + // constants for the flags parameter of operators - /** - * static bootstrap parameter indicating the binary operator allows nulls (e.g. == and +) + /** + * static bootstrap parameter indicating the binary operator allows nulls (e.g. == and +) *
<p>
* requires additional {@link MethodHandles#catchException} guard, which will invoke * the fallback if a null is encountered. */ public static final int OPERATOR_ALLOWS_NULL = 1 << 0; - + /** * static bootstrap parameter indicating the binary operator is part of compound assignment (e.g. +=). *
<p>
@@ -89,7 +89,7 @@ private DefBootstrap() {} // no instance! * to cast back to the receiver's type, depending on types seen. */ public static final int OPERATOR_COMPOUND_ASSIGNMENT = 1 << 1; - + /** * static bootstrap parameter indicating an explicit cast to the return type. *
<p>
@@ -129,7 +129,7 @@ static final class PIC extends MutableCallSite { setTarget(fallback); } - + /** * guard method for inline caching: checks the receiver's class is the same * as the cached class @@ -162,7 +162,7 @@ private MethodHandle lookup(int flavor, String name, Class receiver) throws T default: throw new AssertionError(); } } - + /** * Creates the {@link MethodHandle} for the megamorphic call site * using {@link ClassValue} and {@link MethodHandles#exactInvoker(MethodType)}: @@ -182,7 +182,7 @@ protected MethodHandle computeValue(Class receiverType) { } }; return MethodHandles.foldArguments(MethodHandles.exactInvoker(type), - MEGAMORPHIC_LOOKUP.bindTo(megamorphicCache)); + MEGAMORPHIC_LOOKUP.bindTo(megamorphicCache)); } /** @@ -195,18 +195,18 @@ Object fallback(final Object[] callArgs) throws Throwable { if (depth >= MAX_DEPTH) { // we revert the whole cache and build a new megamorphic one final MethodHandle target = this.createMegamorphicHandle(); - + setTarget(target); - return target.invokeWithArguments(callArgs); + return target.invokeWithArguments(callArgs); } else { final Class receiver = callArgs[0].getClass(); final MethodHandle target = lookup(flavor, name, receiver).asType(type()); - + MethodHandle test = CHECK_CLASS.bindTo(receiver); MethodHandle guard = MethodHandles.guardWithTest(test, target, getTarget()); - + depth++; - + setTarget(guard); return target.invokeWithArguments(callArgs); } @@ -225,7 +225,7 @@ Object fallback(final Object[] callArgs) throws Throwable { MethodType.methodType(Object.class, Object[].class)); MethodHandle mh = publicLookup.findVirtual(ClassValue.class, "get", MethodType.methodType(Object.class, Class.class)); - mh = MethodHandles.filterArguments(mh, 1, + mh = MethodHandles.filterArguments(mh, 1, publicLookup.findVirtual(Object.class, "getClass", MethodType.methodType(Class.class))); MEGAMORPHIC_LOOKUP = mh.asType(mh.type().changeReturnType(MethodHandle.class)); } catch (ReflectiveOperationException e) { @@ -233,7 +233,7 @@ Object fallback(final Object[] callArgs) throws Throwable { } } } - + /** * CallSite that implements the monomorphic inlining cache (for operators). */ @@ -252,14 +252,14 @@ static final class MIC extends MutableCallSite { if (initialDepth > 0) { initialized = true; } - + MethodHandle fallback = FALLBACK.bindTo(this) .asCollector(Object[].class, type.parameterCount()) .asType(type); setTarget(fallback); } - + /** * Does a slow lookup for the operator */ @@ -290,7 +290,7 @@ private MethodHandle lookup(Object[] args) throws Throwable { default: throw new AssertionError(); } } - + private MethodHandle lookupGeneric() { MethodHandle target = DefMath.lookupGeneric(name); if ((flags & OPERATOR_EXPLICIT_CAST) != 0) { @@ -302,7 +302,7 @@ private MethodHandle lookupGeneric() { } return target; } - + /** * Called when a new type is encountered or if cached type does not match. * In that case we revert to a generic, but slower operator handling. @@ -315,7 +315,7 @@ Object fallback(Object[] args) throws Throwable { setTarget(generic.asType(type())); return generic.invokeWithArguments(args); } - + final MethodType type = type(); MethodHandle target = lookup(args); // for math operators: WrongMethodType can be confusing. convert into a ClassCastException if they screw up. @@ -361,18 +361,18 @@ Object fallback(Object[] args) throws Throwable { // very special cases, where even the receiver can be null (see JLS rules for string concat) // we wrap + with an NPE catcher, and use our generic method in that case. 
if (flavor == BINARY_OPERATOR && (flags & OPERATOR_ALLOWS_NULL) != 0) { - MethodHandle handler = MethodHandles.dropArguments(lookupGeneric().asType(type()), - 0, + MethodHandle handler = MethodHandles.dropArguments(lookupGeneric().asType(type()), + 0, NullPointerException.class); guard = MethodHandles.catchException(guard, NullPointerException.class, handler); } - + initialized = true; setTarget(guard); return target.invokeWithArguments(args); } - + /** * guard method for inline caching: checks the receiver's class is the same * as the cached class @@ -388,7 +388,7 @@ static boolean checkLHS(Class clazz, Object leftObject) { static boolean checkRHS(Class left, Class right, Object leftObject, Object rightObject) { return rightObject.getClass() == right; } - + /** * guard method for inline caching: checks the receiver's class and the first argument * are the same as the cached receiver and first argument. @@ -396,7 +396,7 @@ static boolean checkRHS(Class left, Class right, Object leftObject, Object static boolean checkBoth(Class left, Class right, Object leftObject, Object rightObject) { return leftObject.getClass() == left && rightObject.getClass() == right; } - + private static final MethodHandle CHECK_LHS; private static final MethodHandle CHECK_RHS; private static final MethodHandle CHECK_BOTH; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java index 6628484660699..f903c0571b2bd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java @@ -21,8 +21,8 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.lang.invoke.MethodHandles.Lookup; +import java.lang.invoke.MethodType; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,38 +31,38 @@ import java.util.stream.Stream; /** - * Dynamic operators for painless. + * Dynamic operators for painless. *
<p>
* Each operator must "support" the following types: - * {@code int,long,float,double,boolean,Object}. Operators can throw exceptions if + * {@code int,long,float,double,boolean,Object}. Operators can throw exceptions if * the type is illegal. The {@code Object} type must be a "generic" handler that * handles all legal types: it must be convertible to every possible legal signature. */ @SuppressWarnings("unused") public class DefMath { - + // Unary not: only applicable to integral types private static int not(int v) { return ~v; } - + private static long not(long v) { return ~v; } - + private static float not(float v) { throw new ClassCastException("Cannot apply not [~] to type [float]"); } - + private static double not(double v) { throw new ClassCastException("Cannot apply not [~] to type [double]"); } - + private static boolean not(boolean v) { throw new ClassCastException("Cannot apply not [~] to type [boolean]"); } - + private static Object not(Object unary) { if (unary instanceof Long) { return ~(Long)unary; @@ -79,29 +79,29 @@ private static Object not(Object unary) { throw new ClassCastException("Cannot apply [~] operation to type " + "[" + unary.getClass().getCanonicalName() + "]."); } - + // unary negation and plus: applicable to all numeric types private static int neg(int v) { return -v; } - + private static long neg(long v) { return -v; } - + private static float neg(float v) { return -v; } - + private static double neg(double v) { return -v; } - + private static boolean neg(boolean v) { throw new ClassCastException("Cannot apply [-] operation to type [boolean]"); } - + private static Object neg(final Object unary) { if (unary instanceof Double) { return -(double)unary; @@ -122,27 +122,27 @@ private static Object neg(final Object unary) { throw new ClassCastException("Cannot apply [-] operation to type " + "[" + unary.getClass().getCanonicalName() + "]."); } - + private static int plus(int v) { return +v; } - + private static long plus(long v) { return +v; } - + private static float plus(float v) { return +v; } - + private static double plus(double v) { return +v; } - + private static boolean plus(boolean v) { throw new ClassCastException("Cannot apply [+] operation to type [boolean]"); } - + private static Object plus(final Object unary) { if (unary instanceof Double) { return +(double)unary; @@ -163,29 +163,29 @@ private static Object plus(final Object unary) { throw new ClassCastException("Cannot apply [+] operation to type " + "[" + unary.getClass().getCanonicalName() + "]."); } - + // multiplication/division/remainder/subtraction: applicable to all integer types - + private static int mul(int a, int b) { return a * b; } - + private static long mul(long a, long b) { return a * b; } - + private static float mul(float a, float b) { return a * b; } - + private static double mul(double a, double b) { return a * b; } - + private static boolean mul(boolean a, boolean b) { throw new ClassCastException("Cannot apply [*] operation to type [boolean]"); } - + private static Object mul(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -228,27 +228,27 @@ private static Object mul(Object left, Object right) { throw new ClassCastException("Cannot apply [*] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + private static int div(int a, int b) { return a / b; } - + private static long div(long a, long b) { return a / b; } - + private static float div(float a, float b) { 
return a / b; } - + private static double div(double a, double b) { return a / b; } - + private static boolean div(boolean a, boolean b) { throw new ClassCastException("Cannot apply [/] operation to type [boolean]"); } - + private static Object div(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -291,27 +291,27 @@ private static Object div(Object left, Object right) { throw new ClassCastException("Cannot apply [/] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + private static int rem(int a, int b) { return a % b; } - + private static long rem(long a, long b) { return a % b; } - + private static float rem(float a, float b) { return a % b; } - + private static double rem(double a, double b) { return a % b; } - + private static boolean rem(boolean a, boolean b) { throw new ClassCastException("Cannot apply [%] operation to type [boolean]"); } - + private static Object rem(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -354,30 +354,30 @@ private static Object rem(Object left, Object right) { throw new ClassCastException("Cannot apply [%] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + // addition: applicable to all numeric types. // additionally, if either type is a string, the other type can be any arbitrary type (including null) - + private static int add(int a, int b) { return a + b; } - + private static long add(long a, long b) { return a + b; } - + private static float add(float a, float b) { return a + b; } - + private static double add(double a, double b) { return a + b; } - + private static boolean add(boolean a, boolean b) { throw new ClassCastException("Cannot apply [+] operation to type [boolean]"); } - + private static Object add(Object left, Object right) { if (left instanceof String) { return (String) left + right; @@ -424,27 +424,27 @@ private static Object add(Object left, Object right) { throw new ClassCastException("Cannot apply [+] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + private static int sub(int a, int b) { return a - b; } - + private static long sub(long a, long b) { return a - b; } - + private static float sub(float a, float b) { return a - b; } - + private static double sub(double a, double b) { return a - b; } - + private static boolean sub(boolean a, boolean b) { throw new ClassCastException("Cannot apply [-] operation to type [boolean]"); } - + private static Object sub(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -487,29 +487,29 @@ private static Object sub(Object left, Object right) { throw new ClassCastException("Cannot apply [-] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + // eq: applicable to any arbitrary type, including nulls for both arguments!!! 
private static boolean eq(int a, int b) { return a == b; } - + private static boolean eq(long a, long b) { return a == b; } - + private static boolean eq(float a, float b) { return a == b; } - + private static boolean eq(double a, double b) { return a == b; } - + private static boolean eq(boolean a, boolean b) { return a == b; } - + private static boolean eq(Object left, Object right) { if (left != null && right != null) { if (left instanceof Double) { @@ -565,29 +565,29 @@ private static boolean eq(Object left, Object right) { return left == null && right == null; } - + // comparison operators: applicable for any numeric type private static boolean lt(int a, int b) { return a < b; } - + private static boolean lt(long a, long b) { return a < b; } - + private static boolean lt(float a, float b) { return a < b; } - + private static boolean lt(double a, double b) { return a < b; } - + private static boolean lt(boolean a, boolean b) { - throw new ClassCastException("Cannot apply [<] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [<] operation to type [boolean]"); } - + private static boolean lt(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -634,23 +634,23 @@ private static boolean lt(Object left, Object right) { private static boolean lte(int a, int b) { return a <= b; } - + private static boolean lte(long a, long b) { return a <= b; } - + private static boolean lte(float a, float b) { return a <= b; } - + private static boolean lte(double a, double b) { return a <= b; } - + private static boolean lte(boolean a, boolean b) { - throw new ClassCastException("Cannot apply [<=] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [<=] operation to type [boolean]"); } - + private static boolean lte(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -697,23 +697,23 @@ private static boolean lte(Object left, Object right) { private static boolean gt(int a, int b) { return a > b; } - + private static boolean gt(long a, long b) { return a > b; } - + private static boolean gt(float a, float b) { return a > b; } - + private static boolean gt(double a, double b) { return a > b; } - + private static boolean gt(boolean a, boolean b) { - throw new ClassCastException("Cannot apply [>] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [>] operation to type [boolean]"); } - + private static boolean gt(Object left, Object right) { if (left instanceof Number) { if (right instanceof Number) { @@ -756,25 +756,25 @@ private static boolean gt(Object left, Object right) { throw new ClassCastException("Cannot apply [>] operation to types " + "[" + left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + private static boolean gte(int a, int b) { return a >= b; } - + private static boolean gte(long a, long b) { return a >= b; } - + private static boolean gte(float a, float b) { return a >= b; } - + private static boolean gte(double a, double b) { return a >= b; } - + private static boolean gte(boolean a, boolean b) { - throw new ClassCastException("Cannot apply [>=] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [>=] operation to type [boolean]"); } private static boolean gte(Object left, Object right) { @@ -819,10 +819,10 @@ private static boolean gte(Object left, Object right) { throw new ClassCastException("Cannot apply [>] operation to types " + "[" + 
left.getClass().getCanonicalName() + "] and [" + right.getClass().getCanonicalName() + "]."); } - + // helper methods to convert an integral according to numeric promotion // this is used by the generic code for bitwise and shift operators - + private static long longIntegralValue(Object o) { if (o instanceof Long) { return (long)o; @@ -834,7 +834,7 @@ private static long longIntegralValue(Object o) { throw new ClassCastException("Cannot convert [" + o.getClass().getCanonicalName() + "] to an integral value."); } } - + private static int intIntegralValue(Object o) { if (o instanceof Integer || o instanceof Short || o instanceof Byte) { return ((Number)o).intValue(); @@ -844,29 +844,29 @@ private static int intIntegralValue(Object o) { throw new ClassCastException("Cannot convert [" + o.getClass().getCanonicalName() + "] to an integral value."); } } - + // bitwise operators: valid only for integral types private static int and(int a, int b) { return a & b; } - + private static long and(long a, long b) { return a & b; } - + private static float and(float a, float b) { - throw new ClassCastException("Cannot apply [&] operation to type [float]"); + throw new ClassCastException("Cannot apply [&] operation to type [float]"); } - + private static double and(double a, double b) { - throw new ClassCastException("Cannot apply [&] operation to type [float]"); + throw new ClassCastException("Cannot apply [&] operation to type [float]"); } - + private static boolean and(boolean a, boolean b) { return a & b; } - + private static Object and(Object left, Object right) { if (left instanceof Boolean && right instanceof Boolean) { return (boolean)left & (boolean)right; @@ -876,23 +876,23 @@ private static Object and(Object left, Object right) { return intIntegralValue(left) & intIntegralValue(right); } } - + private static int xor(int a, int b) { return a ^ b; } - + private static long xor(long a, long b) { return a ^ b; } - + private static float xor(float a, float b) { - throw new ClassCastException("Cannot apply [^] operation to type [float]"); + throw new ClassCastException("Cannot apply [^] operation to type [float]"); } - + private static double xor(double a, double b) { - throw new ClassCastException("Cannot apply [^] operation to type [float]"); + throw new ClassCastException("Cannot apply [^] operation to type [float]"); } - + private static boolean xor(boolean a, boolean b) { return a ^ b; } @@ -910,23 +910,23 @@ private static Object xor(Object left, Object right) { private static int or(int a, int b) { return a | b; } - + private static long or(long a, long b) { return a | b; } - + private static float or(float a, float b) { - throw new ClassCastException("Cannot apply [|] operation to type [float]"); + throw new ClassCastException("Cannot apply [|] operation to type [float]"); } - + private static double or(double a, double b) { - throw new ClassCastException("Cannot apply [|] operation to type [float]"); + throw new ClassCastException("Cannot apply [|] operation to type [float]"); } - + private static boolean or(boolean a, boolean b) { return a | b; } - + private static Object or(Object left, Object right) { if (left instanceof Boolean && right instanceof Boolean) { return (boolean)left | (boolean)right; @@ -936,30 +936,30 @@ private static Object or(Object left, Object right) { return intIntegralValue(left) | intIntegralValue(right); } } - + // shift operators, valid for any integral types, but does not promote. 
// we implement all shifts as long shifts, because the extra bits are ignored anyway. - + private static int lsh(int a, long b) { return a << b; } - + private static long lsh(long a, long b) { return a << b; } - + private static float lsh(float a, long b) { - throw new ClassCastException("Cannot apply [<<] operation to type [float]"); + throw new ClassCastException("Cannot apply [<<] operation to type [float]"); } - + private static double lsh(double a, long b) { - throw new ClassCastException("Cannot apply [<<] operation to type [double]"); + throw new ClassCastException("Cannot apply [<<] operation to type [double]"); } - + private static boolean lsh(boolean a, long b) { - throw new ClassCastException("Cannot apply [<<] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [<<] operation to type [boolean]"); } - + public static Object lsh(Object left, long right) { if (left instanceof Long) { return (long)(left) << right; @@ -967,25 +967,25 @@ public static Object lsh(Object left, long right) { return intIntegralValue(left) << right; } } - + private static int rsh(int a, long b) { return a >> b; } - + private static long rsh(long a, long b) { return a >> b; } - + private static float rsh(float a, long b) { - throw new ClassCastException("Cannot apply [>>] operation to type [float]"); + throw new ClassCastException("Cannot apply [>>] operation to type [float]"); } - + private static double rsh(double a, long b) { - throw new ClassCastException("Cannot apply [>>] operation to type [double]"); + throw new ClassCastException("Cannot apply [>>] operation to type [double]"); } - + private static boolean rsh(boolean a, long b) { - throw new ClassCastException("Cannot apply [>>] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [>>] operation to type [boolean]"); } public static Object rsh(Object left, long right) { @@ -995,25 +995,25 @@ public static Object rsh(Object left, long right) { return intIntegralValue(left) >> right; } } - + private static int ush(int a, long b) { return a >>> b; } - + private static long ush(long a, long b) { return a >>> b; } - + private static float ush(float a, long b) { - throw new ClassCastException("Cannot apply [>>>] operation to type [float]"); + throw new ClassCastException("Cannot apply [>>>] operation to type [float]"); } - + private static double ush(double a, long b) { - throw new ClassCastException("Cannot apply [>>>] operation to type [double]"); + throw new ClassCastException("Cannot apply [>>>] operation to type [double]"); } - + private static boolean ush(boolean a, long b) { - throw new ClassCastException("Cannot apply [>>>] operation to type [boolean]"); + throw new ClassCastException("Cannot apply [>>>] operation to type [boolean]"); } public static Object ush(Object left, long right) { @@ -1023,15 +1023,15 @@ public static Object ush(Object left, long right) { return intIntegralValue(left) >>> right; } } - - /** - * unboxes a class to its primitive type, or returns the original + + /** + * unboxes a class to its primitive type, or returns the original * class if its not a boxed type. */ private static Class unbox(Class clazz) { return MethodType.methodType(clazz).unwrap().returnType(); } - + /** Unary promotion. All Objects are promoted to Object. */ private static Class promote(Class clazz) { // if either is a non-primitive type -> Object. 
@@ -1039,25 +1039,25 @@ private static Class promote(Class clazz) { return Object.class; } // always promoted to integer - if (clazz == byte.class || clazz == short.class || clazz == char.class || clazz == int.class) { - return int.class; - } else { - return clazz; - } + if (clazz == byte.class || clazz == short.class || clazz == char.class || clazz == int.class) { + return int.class; + } else { + return clazz; + } } - + /** Binary promotion. */ private static Class promote(Class a, Class b) { // if either is a non-primitive type -> Object. if (a.isPrimitive() == false || b.isPrimitive() == false) { return Object.class; } - + // boolean -> boolean if (a == boolean.class && b == boolean.class) { return boolean.class; } - + // ordinary numeric promotion if (a == double.class || b == double.class) { return double.class; @@ -1069,7 +1069,7 @@ private static Class promote(Class a, Class b) { return int.class; } } - + private static final Lookup PRIV_LOOKUP = MethodHandles.lookup(); private static final Map,Map> TYPE_OP_MAPPING = Collections.unmodifiableMap( @@ -1107,7 +1107,7 @@ private static Class promote(Class a, Class b) { } })) ); - + /** Returns an appropriate method handle for a unary or shift operator, based only on the receiver (LHS) */ public static MethodHandle lookupUnary(Class receiverClass, String name) { MethodHandle handle = TYPE_OP_MAPPING.get(promote(unbox(receiverClass))).get(name); @@ -1116,7 +1116,7 @@ public static MethodHandle lookupUnary(Class receiverClass, String name) { } return handle; } - + /** Returns an appropriate method handle for a binary operator, based on promotion of the LHS and RHS arguments */ public static MethodHandle lookupBinary(Class classA, Class classB, String name) { MethodHandle handle = TYPE_OP_MAPPING.get(promote(promote(unbox(classA)), promote(unbox(classB)))).get(name); @@ -1125,7 +1125,7 @@ public static MethodHandle lookupBinary(Class classA, Class classB, String } return handle; } - + /** Returns a generic method handle for any operator, that can handle all valid signatures, nulls, corner cases */ public static MethodHandle lookupGeneric(String name) { return TYPE_OP_MAPPING.get(Object.class).get(name); @@ -1143,7 +1143,7 @@ static Object dynamicReceiverCast(Object returnValue, Object lhs) { return returnValue; } } - + /** * Slow dynamic cast: casts {@code value} to an instance of {@code clazz} * based upon inspection. If {@code lhs} is null, no cast takes place. @@ -1173,7 +1173,7 @@ static Object dynamicCast(Class clazz, Object value) { return value; } } - + /** Slowly returns a Number for o. 
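The binary promotion above follows the usual Java ladder: any non-primitive operand forces Object, two booleans stay boolean, and numeric operands widen double over float over long over int, with byte, short, and char collapsing to int. A compact restating as a hypothetical helper, not the PR's code:

public final class PromotionSketch {
    static Class<?> promote(Class<?> a, Class<?> b) {
        if (a.isPrimitive() == false || b.isPrimitive() == false) return Object.class;
        if (a == boolean.class && b == boolean.class) return boolean.class;
        if (a == double.class || b == double.class) return double.class;
        if (a == float.class || b == float.class) return float.class;
        if (a == long.class || b == long.class) return long.class;
        return int.class; // byte/short/char/int all collapse to int
    }

    public static void main(String[] args) {
        System.out.println(promote(byte.class, char.class));    // int
        System.out.println(promote(int.class, long.class));     // long
        System.out.println(promote(long.class, double.class));  // double
        System.out.println(promote(int.class, Integer.class));  // class java.lang.Object
    }
}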
Just for supporting dynamicCast */ static Number getNumber(Object o) { if (o instanceof Number) { @@ -1184,17 +1184,17 @@ static Number getNumber(Object o) { throw new ClassCastException("Cannot convert [" + o.getClass() + "] to a Number"); } } - + private static final MethodHandle DYNAMIC_CAST; private static final MethodHandle DYNAMIC_RECEIVER_CAST; static { final Lookup lookup = MethodHandles.lookup(); try { - DYNAMIC_CAST = lookup.findStatic(lookup.lookupClass(), - "dynamicCast", + DYNAMIC_CAST = lookup.findStatic(lookup.lookupClass(), + "dynamicCast", MethodType.methodType(Object.class, Class.class, Object.class)); - DYNAMIC_RECEIVER_CAST = lookup.findStatic(lookup.lookupClass(), - "dynamicReceiverCast", + DYNAMIC_RECEIVER_CAST = lookup.findStatic(lookup.lookupClass(), + "dynamicReceiverCast", MethodType.methodType(Object.class, Object.class, Object.class)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); @@ -1204,7 +1204,7 @@ static Number getNumber(Object o) { /** Looks up generic method, with a dynamic cast to the receiver's type. (compound assignment) */ public static MethodHandle dynamicCast(MethodHandle target) { // adapt dynamic receiver cast to the generic method - MethodHandle cast = DYNAMIC_RECEIVER_CAST.asType(MethodType.methodType(target.type().returnType(), + MethodHandle cast = DYNAMIC_RECEIVER_CAST.asType(MethodType.methodType(target.type().returnType(), target.type().returnType(), target.type().parameterType(0))); // drop the RHS parameter @@ -1212,7 +1212,7 @@ public static MethodHandle dynamicCast(MethodHandle target) { // combine: f(x,y) -> g(f(x,y), x, y); return MethodHandles.foldArguments(cast, target); } - + /** Looks up generic method, with a dynamic cast to the specified type. (explicit assignment) */ public static MethodHandle dynamicCast(MethodHandle target, Class desired) { // adapt dynamic cast to the generic method @@ -1221,23 +1221,23 @@ public static MethodHandle dynamicCast(MethodHandle target, Class desired) { MethodHandle cast = DYNAMIC_CAST.bindTo(desired); return MethodHandles.filterReturnValue(target, cast); } - + /** Forces a cast to class A for target (only if types differ) */ public static MethodHandle cast(Class classA, MethodHandle target) { MethodType newType = MethodType.methodType(classA).unwrap(); MethodType targetType = MethodType.methodType(target.type().returnType()).unwrap(); - + // don't do a conversion if types are the same. explicitCastArguments has this opto, // but we do it explicitly, to make the boolean check simpler if (newType.returnType() == targetType.returnType()) { return target; } - + // we don't allow the to/from boolean conversions of explicitCastArguments if (newType.returnType() == boolean.class || targetType.returnType() == boolean.class) { throw new ClassCastException("Cannot cast " + targetType.returnType() + " to " + newType.returnType()); } - + // null return values are not possible for our arguments. 
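The handle plumbing above leans on two java.lang.invoke combinators: filterReturnValue post-processes a target's return value (the explicit-assignment cast) and foldArguments feeds a target's result plus its arguments back through a combiner (the compound-assignment cast). A minimal assumed illustration of the first, with made-up method names:

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public final class HandleComboSketch {
    static Object add(Object a, Object b) { return (int) a + (int) b; }
    static Object toLong(Object v) { return (long) (int) v; }

    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        MethodHandle target = lookup.findStatic(HandleComboSketch.class, "add",
                MethodType.methodType(Object.class, Object.class, Object.class));
        MethodHandle cast = lookup.findStatic(HandleComboSketch.class, "toLong",
                MethodType.methodType(Object.class, Object.class));
        // post-process the return value, as dynamicCast(MethodHandle, Class) does above:
        MethodHandle adapted = MethodHandles.filterReturnValue(target, cast);
        System.out.println(adapted.invoke(1, 2)); // 3, now boxed as a Long
    }
}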
return MethodHandles.explicitCastArguments(target, target.type().changeReturnType(newType.returnType())); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 75575d6f12568..25145a44b5853 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -21,6 +21,7 @@ import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.Opcodes; +import org.objectweb.asm.Type; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; @@ -48,35 +49,6 @@ public final class Definition { private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); - /** Some native types as constants: */ - public final Type voidType; - public final Type booleanType; - public final Type BooleanType; - public final Type byteType; - public final Type ByteType; - public final Type shortType; - public final Type ShortType; - public final Type intType; - public final Type IntegerType; - public final Type longType; - public final Type LongType; - public final Type floatType; - public final Type FloatType; - public final Type doubleType; - public final Type DoubleType; - public final Type charType; - public final Type CharacterType; - public final Type ObjectType; - public final Type DefType; - public final Type NumberType; - public final Type StringType; - public final Type ExceptionType; - public final Type PatternType; - public final Type MatcherType; - public final Type IteratorType; - public final Type ArrayListType; - public final Type HashMapType; - /** Marker class for def type to be used during type analysis. */ public static final class def { private def() { @@ -84,53 +56,6 @@ private def() { } } - public static final class Type { - public final String name; - public final int dimensions; - public final boolean dynamic; - public final Struct struct; - public final Class clazz; - public final org.objectweb.asm.Type type; - - private Type(final String name, final int dimensions, final boolean dynamic, - final Struct struct, final Class clazz, final org.objectweb.asm.Type type) { - this.name = name; - this.dimensions = dimensions; - this.dynamic = dynamic; - this.struct = struct; - this.clazz = clazz; - this.type = type; - } - - @Override - public boolean equals(final Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - final Type type = (Type)object; - - return this.type.equals(type.type) && struct.equals(type.struct); - } - - @Override - public int hashCode() { - int result = struct.hashCode(); - result = 31 * result + type.hashCode(); - - return result; - } - - @Override - public String toString() { - return name; - } - } - public static class Method { public final String name; public final Struct owner; @@ -431,21 +356,6 @@ private Cast(Class from, Class to, boolean explicit, Class unboxFrom, C } } - /** Returns whether or not a non-array type exists. */ - public boolean isSimpleType(final String name) { - return structsMap.containsKey(name); - } - - /** Gets the type given by its name */ - public Type getType(final String name) { - return getTypeInternal(name); - } - - /** Creates an array type from the given Struct. 
*/ - public Type getType(final Struct struct, final int dimensions) { - return getTypeInternal(struct, dimensions); - } - public static Class getBoxedType(Class clazz) { if (clazz == boolean.class) { return Boolean.class; @@ -502,6 +412,10 @@ public static boolean isConstantType(Class clazz) { clazz == String.class; } + public Class getClassFromBinaryName(String painlessType) { + return painlessTypesToJavaClasses.get(painlessType.replace('$', '.')); + } + public static Class ObjectClassTodefClass(Class clazz) { if (clazz.isArray()) { Class component = clazz.getComponentType(); @@ -590,53 +504,6 @@ public static String ClassToName(Class clazz) { return clazz.getCanonicalName().replace('$', '.'); } - public Type ClassToType(Class clazz) { - if (clazz == null) { - return null; - } else if (clazz.isArray()) { - Class component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == def.class) { - return getType(structsMap.get(def.class.getSimpleName()), dimensions); - } else { - return getType(structsMap.get(ClassToName(component)), dimensions); - } - } else if (clazz == def.class) { - return getType(structsMap.get(def.class.getSimpleName()), 0); - } - - return getType(structsMap.get(ClassToName(clazz)), 0); - } - - public Struct RuntimeClassToStruct(Class clazz) { - return structsMap.get(ClassToName(clazz)); - } - - public static Class TypeToClass(Type type) { - if (def.class.getSimpleName().equals(type.struct.name)) { - return ObjectClassTodefClass(type.clazz); - } - - return type.clazz; - } - - public Class getClassFromBinaryName(String name) { - Struct struct = structsMap.get(name.replace('$', '.')); - - return struct == null ? null : struct.clazz; - } - - /** Collection of all simple types. Used by {@code PainlessDocGenerator} to generate an API reference. 
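The new getClassFromBinaryName above only normalizes the lookup key: Java binary names spell nested classes with '$' while Painless type names use '.', so the '$' is replaced before consulting the map. A one-line assumed demo:

public final class BinaryNameSketch {
    public static void main(String[] args) {
        System.out.println("java.util.Map$Entry".replace('$', '.')); // java.util.Map.Entry
    }
}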
*/ - Collection allSimpleTypes() { - return simpleTypesMap.values(); - } - private static String buildMethodCacheKey(String structName, String methodName, List> arguments) { StringBuilder key = new StringBuilder(); key.append(structName); @@ -653,21 +520,21 @@ private static String buildFieldCacheKey(String structName, String fieldName, St return structName + fieldName + typeName; } - // INTERNAL IMPLEMENTATION: + public Collection getStructs() { + return javaClassesToPainlessStructs.values(); + } - private final Map structsMap; - private final Map simpleTypesMap; + private final Map> painlessTypesToJavaClasses; + private final Map, Struct> javaClassesToPainlessStructs; public Definition(List whitelists) { - structsMap = new HashMap<>(); - simpleTypesMap = new HashMap<>(); + painlessTypesToJavaClasses = new HashMap<>(); + javaClassesToPainlessStructs = new HashMap<>(); - Map, Struct> javaClassesToPainlessStructs = new HashMap<>(); String origin = null; - // add the universal def type - structsMap.put(def.class.getSimpleName(), - new Struct(def.class.getSimpleName(), Object.class, org.objectweb.asm.Type.getType(Object.class))); + painlessTypesToJavaClasses.put("def", def.class); + javaClassesToPainlessStructs.put(def.class, new Struct("def", Object.class, Type.getType(Object.class))); try { // first iteration collects all the Painless type names that @@ -675,7 +542,7 @@ public Definition(List whitelists) { for (Whitelist whitelist : whitelists) { for (Whitelist.Struct whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - Struct painlessStruct = structsMap.get(painlessTypeName); + Struct painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + @@ -685,7 +552,7 @@ public Definition(List whitelists) { origin = whitelistStruct.origin; addStruct(whitelist.javaClassLoader, whitelistStruct); - painlessStruct = structsMap.get(painlessTypeName); + painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct); } } @@ -719,13 +586,8 @@ public Definition(List whitelists) { // goes through each Painless struct and determines the inheritance list, // and then adds all inherited types to the Painless struct's whitelist - for (Map.Entry painlessNameStructEntry : structsMap.entrySet()) { - String painlessStructName = painlessNameStructEntry.getKey(); - Struct painlessStruct = painlessNameStructEntry.getValue(); - - if (painlessStruct.name.equals(painlessStructName) == false) { - continue; - } + for (Class javaClass : javaClassesToPainlessStructs.keySet()) { + Struct painlessStruct = javaClassesToPainlessStructs.get(javaClass); List painlessSuperStructs = new ArrayList<>(); Class javaSuperClass = painlessStruct.clazz.getSuperclass(); @@ -782,52 +644,14 @@ public Definition(List whitelists) { } // precompute runtime classes - for (String painlessStructName : structsMap.keySet()) { - Struct painlessStruct = structsMap.get(painlessStructName); - - if (painlessStruct.name.equals(painlessStructName) == false) { - continue; - } - + for (Struct painlessStruct : javaClassesToPainlessStructs.values()) { addRuntimeClass(painlessStruct); } // copy all structs to make them 
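The two maps introduced above replace the old name-keyed structsMap: one resolves Painless type names to Java classes, the other resolves Java classes to Painless structs, and a private marker class stands in for the dynamic def type. A hypothetical sketch of the name-to-class side (registry contents per the diff, class name made up):

import java.util.HashMap;
import java.util.Map;

public final class DefMarkerSketch {
    // marker class standing in for Painless's dynamic "def" type
    public static final class def { private def() {} }

    public static void main(String[] args) {
        Map<String, Class<?>> painlessTypesToJavaClasses = new HashMap<>();
        painlessTypesToJavaClasses.put("def", def.class);
        painlessTypesToJavaClasses.put("int", int.class);
        painlessTypesToJavaClasses.put("String", String.class);
        System.out.println(painlessTypesToJavaClasses.get("def")); // class DefMarkerSketch$def
    }
}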
unmodifiable for outside users: - for (Map.Entry entry : structsMap.entrySet()) { - if (entry.getKey().equals(entry.getValue().name) == false) { - continue; - } - + for (Map.Entry,Struct> entry : javaClassesToPainlessStructs.entrySet()) { entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue()))); } - - voidType = getType("void"); - booleanType = getType("boolean"); - BooleanType = getType("Boolean"); - byteType = getType("byte"); - ByteType = getType("Byte"); - shortType = getType("short"); - ShortType = getType("Short"); - intType = getType("int"); - IntegerType = getType("Integer"); - longType = getType("long"); - LongType = getType("Long"); - floatType = getType("float"); - FloatType = getType("Float"); - doubleType = getType("double"); - DoubleType = getType("Double"); - charType = getType("char"); - CharacterType = getType("Character"); - ObjectType = getType("Object"); - DefType = getType(def.class.getSimpleName()); - NumberType = getType("Number"); - StringType = getType("String"); - ExceptionType = getType("Exception"); - PatternType = getType("Pattern"); - MatcherType = getType("Matcher"); - IteratorType = getType("Iterator"); - ArrayListType = getType("ArrayList"); - HashMapType = getType("HashMap"); } private void addStruct(ClassLoader whitelistClassLoader, Whitelist.Struct whitelistStruct) { @@ -864,35 +688,45 @@ private void addStruct(ClassLoader whitelistClassLoader, Whitelist.Struct whitel } } - Struct existingStruct = structsMap.get(painlessTypeName); + Struct existingStruct = javaClassesToPainlessStructs.get(javaClass); if (existingStruct == null) { Struct struct = new Struct(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); - structsMap.put(painlessTypeName, struct); - - if (whitelistStruct.onlyFQNJavaClassName) { - simpleTypesMap.put(painlessTypeName, getType(painlessTypeName)); - } else if (simpleTypesMap.containsKey(importedPainlessTypeName) == false) { - simpleTypesMap.put(importedPainlessTypeName, getType(painlessTypeName)); - structsMap.put(importedPainlessTypeName, struct); - } else { - throw new IllegalArgumentException("duplicate short name [" + importedPainlessTypeName + "] " + - "found for struct [" + painlessTypeName + "]"); - } + painlessTypesToJavaClasses.put(painlessTypeName, javaClass); + javaClassesToPainlessStructs.put(javaClass, struct); } else if (existingStruct.clazz.equals(javaClass) == false) { throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " + "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " + "[" + existingStruct.clazz.getName() + "]"); - } else if (whitelistStruct.onlyFQNJavaClassName && simpleTypesMap.containsKey(importedPainlessTypeName) && - simpleTypesMap.get(importedPainlessTypeName).clazz == javaClass || - whitelistStruct.onlyFQNJavaClassName == false && (simpleTypesMap.containsKey(importedPainlessTypeName) == false || - simpleTypesMap.get(importedPainlessTypeName).clazz != javaClass)) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + } + + if (painlessTypeName.equals(importedPainlessTypeName)) { + if (whitelistStruct.onlyFQNJavaClassName == false) { + throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package"); + } + } else { + Class importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName); + + if (importedJavaClass == null) { + if 
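The copy loop above freezes struct values in place through Map.Entry.setValue rather than rebuilding the map. The same idiom in an assumed standalone form:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public final class FreezeSketch {
    public static void main(String[] args) {
        Map<String, List<String>> structs = new HashMap<>();
        structs.put("ArrayList", new ArrayList<>(Arrays.asList("add", "remove")));
        for (Map.Entry<String, List<String>> entry : structs.entrySet()) {
            entry.setValue(Collections.unmodifiableList(entry.getValue())); // freeze in place
        }
        try {
            structs.get("ArrayList").add("clear");
        } catch (UnsupportedOperationException e) {
            System.out.println("frozen"); // values are now read-only
        }
    }
}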
(whitelistStruct.onlyFQNJavaClassName == false) { + if (existingStruct != null) { + throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + } + + painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass); + } + } else if (importedJavaClass.equals(javaClass) == false) { + throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " + + "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " + + "and [" + importedJavaClass.getName() + "]"); + } else if (whitelistStruct.onlyFQNJavaClassName) { + throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]"); + } } } private void addConstructor(String ownerStructName, Whitelist.Constructor whitelistConstructor) { - Struct ownerStruct = structsMap.get(ownerStructName); + Struct ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -906,7 +740,7 @@ private void addConstructor(String ownerStructName, Whitelist.Constructor whitel String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = TypeToClass(getTypeInternal(painlessParameterTypeName)); + Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); javaClassParameters[parameterCount] = defClassToObjectClass(painlessParameterClass); @@ -952,7 +786,7 @@ private void addConstructor(String ownerStructName, Whitelist.Constructor whitel } private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, Whitelist.Method whitelistMethod) { - Struct ownerStruct = structsMap.get(ownerStructName); + Struct ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -991,7 +825,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount); try { - Class painlessParameterClass = TypeToClass(getTypeInternal(painlessParameterTypeName)); + Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); javaClassParameters[parameterCount + augmentedOffset] = defClassToObjectClass(painlessParameterClass); @@ -1016,7 +850,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, Class painlessReturnClass; try { - painlessReturnClass = TypeToClass(getTypeInternal(whitelistMethod.painlessReturnTypeName)); + painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + @@ -1088,7 +922,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, } private void addField(String ownerStructName, Whitelist.Field whitelistField) { - Struct ownerStruct = 
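The only_fqn handling above reduces to three rules: a class with no package must be whitelisted only_fqn, a short (imported) name may be claimed once and only when only_fqn is false, and repeated registrations must agree on both the class and the flag. A condensed hypothetical restating that drops the diff's existing-struct bookkeeping:

import java.util.HashMap;
import java.util.Map;

public final class OnlyFqnSketch {
    static final Map<String, Class<?>> types = new HashMap<>();

    static void register(String fqn, String shortName, Class<?> clazz, boolean onlyFqn) {
        types.put(fqn, clazz);
        if (fqn.equals(shortName)) {                            // class in the default package
            if (onlyFqn == false) throw new IllegalArgumentException("must use only_fqn with no package");
            return;
        }
        Class<?> imported = types.get(shortName);
        if (imported == null) {
            if (onlyFqn == false) types.put(shortName, clazz);  // first import claims the short name
        } else if (imported.equals(clazz) == false) {
            throw new IllegalArgumentException("short name [" + shortName + "] already represents another class");
        } else if (onlyFqn) {
            throw new IllegalArgumentException("inconsistent only_fqn parameters for [" + fqn + "]");
        }
    }

    public static void main(String[] args) {
        register("java.lang.String", "String", String.class, false);
        System.out.println(types.get("String")); // class java.lang.String
    }
}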
structsMap.get(ownerStructName); + Struct ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -1112,7 +946,7 @@ private void addField(String ownerStructName, Whitelist.Field whitelistField) { Class painlessFieldClass; try { - painlessFieldClass = TypeToClass(getTypeInternal(whitelistField.painlessFieldTypeName)); + painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " + "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae); @@ -1169,14 +1003,14 @@ private void addField(String ownerStructName, Whitelist.Field whitelistField) { } private void copyStruct(String struct, List children) { - final Struct owner = structsMap.get(struct); + final Struct owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } for (int count = 0; count < children.size(); ++count) { - final Struct child = structsMap.get(children.get(count)); + final Struct child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -1340,71 +1174,68 @@ private Method computeFunctionalInterfaceMethod(Struct clazz) { return painless; } - private Type getTypeInternal(String name) { - // simple types (e.g. 0 array dimensions) are a simple hash lookup for speed - Type simple = simpleTypesMap.get(name); + public boolean isSimplePainlessType(String painlessType) { + return painlessTypesToJavaClasses.containsKey(painlessType); + } - if (simple != null) { - return simple; - } + public Struct getPainlessStructFromJavaClass(Class clazz) { + return javaClassesToPainlessStructs.get(clazz); + } - int dimensions = getDimensions(name); - String structstr = dimensions == 0 ? 
name : name.substring(0, name.indexOf('[')); - Struct struct = structsMap.get(structstr); + public Class getJavaClassFromPainlessType(String painlessType) { + Class javaClass = painlessTypesToJavaClasses.get(painlessType); - if (struct == null) { - throw new IllegalArgumentException("The struct with name [" + name + "] has not been defined."); + if (javaClass != null) { + return javaClass; } + int arrayDimensions = 0; + int arrayIndex = painlessType.indexOf('['); - return getTypeInternal(struct, dimensions); - } - - private Type getTypeInternal(Struct struct, int dimensions) { - String name = struct.name; - org.objectweb.asm.Type type = struct.type; - Class clazz = struct.clazz; + if (arrayIndex != -1) { + int length = painlessType.length(); - if (dimensions > 0) { - StringBuilder builder = new StringBuilder(name); - char[] brackets = new char[dimensions]; - - for (int count = 0; count < dimensions; ++count) { - builder.append("[]"); - brackets[count] = '['; + while (arrayIndex < length) { + if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') { + ++arrayDimensions; + } else { + throw new IllegalArgumentException("invalid painless type [" + painlessType + "]."); + } } - String descriptor = new String(brackets) + struct.type.getDescriptor(); - - name = builder.toString(); - type = org.objectweb.asm.Type.getType(descriptor); - - try { - clazz = Class.forName(type.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalArgumentException("The class [" + type.getInternalName() + "]" + - " could not be found to create type [" + name + "]."); + painlessType = painlessType.substring(0, painlessType.indexOf('[')); + javaClass = painlessTypesToJavaClasses.get(painlessType); + + char braces[] = new char[arrayDimensions]; + Arrays.fill(braces, '['); + String descriptor = new String(braces); + + if (javaClass == boolean.class) { + descriptor += "Z"; + } else if (javaClass == byte.class) { + descriptor += "B"; + } else if (javaClass == short.class) { + descriptor += "S"; + } else if (javaClass == char.class) { + descriptor += "C"; + } else if (javaClass == int.class) { + descriptor += "I"; + } else if (javaClass == long.class) { + descriptor += "J"; + } else if (javaClass == float.class) { + descriptor += "F"; + } else if (javaClass == double.class) { + descriptor += "D"; + } else { + descriptor += "L" + javaClass.getName() + ";"; } - } - - return new Type(name, dimensions, def.class.getSimpleName().equals(name), struct, clazz, type); - } - private int getDimensions(String name) { - int dimensions = 0; - int index = name.indexOf('['); - - if (index != -1) { - int length = name.length(); - - while (index < length) { - if (name.charAt(index) == '[' && ++index < length && name.charAt(index++) == ']') { - ++dimensions; - } else { - throw new IllegalArgumentException("Invalid array braces in canonical name [" + name + "]."); - } + try { + return Class.forName(descriptor); + } catch (ClassNotFoundException cnfe) { + throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe); } } - return dimensions; + throw new IllegalArgumentException("invalid painless type [" + painlessType + "]"); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 0b698dd244192..1b438965538ce 100644 --- 
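getJavaClassFromPainlessType above resolves array types by synthesizing a JVM descriptor, one '[' per dimension plus an element code such as Z, I, or L<binary name>;, and handing it to Class.forName, which accepts descriptor-style names for array classes. An assumed demo:

public final class ArrayDescriptorSketch {
    public static void main(String[] args) throws ClassNotFoundException {
        System.out.println(Class.forName("[[I"));                                    // class [[I
        System.out.println(Class.forName("[[I").getCanonicalName());                 // int[][]
        System.out.println(Class.forName("[Ljava.lang.String;").getCanonicalName()); // java.lang.String[]
    }
}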
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -78,7 +78,7 @@ public class FunctionRef { * @param numCaptures number of captured arguments */ public FunctionRef(Definition definition, Class expected, String type, String call, int numCaptures) { - this(expected, definition.ClassToType(expected).struct.functionalMethod, + this(expected, definition.getPainlessStructFromJavaClass(expected).functionalMethod, lookup(definition, expected, type, call, numCaptures > 0), numCaptures); } @@ -162,14 +162,14 @@ private static Definition.Method lookup(Definition definition, Class expected String type, String call, boolean receiverCaptured) { // check its really a functional interface // for e.g. Comparable - Method method = definition.ClassToType(expected).struct.functionalMethod; + Method method = definition.getPainlessStructFromJavaClass(expected).functionalMethod; if (method == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + "to [" + Definition.ClassToName(expected) + "], not a functional interface"); } // lookup requested method - Definition.Struct struct = definition.getType(type).struct; + Definition.Struct struct = definition.getPainlessStructFromJavaClass(definition.getJavaClassFromPainlessType(type)); final Definition.Method impl; // ctor ref if ("new".equals(call)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java index 0b2fdf35890a0..7ae93eba22632 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java @@ -54,7 +54,7 @@ public Map> getHeaders(Definition definition) { if (objectToExplain != null) { toString = objectToExplain.toString(); javaClassName = objectToExplain.getClass().getName(); - Definition.Struct struct = definition.ClassToType(objectToExplain.getClass()).struct; + Definition.Struct struct = definition.getPainlessStructFromJavaClass(objectToExplain.getClass()); if (struct != null) { painlessClassName = struct.name; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 4ebcf8bfb82d2..833ff0eac4134 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -31,8 +31,8 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.painless.spi.PainlessExtension; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index 60ce1d033532a..0ec806282db2f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -179,22 +179,18 @@ private MethodArgument methodArgument(Definition definition, Class clazz, Str private static Class definitionTypeForClass(Definition definition, Class type, Function, String> unknownErrorMessageSource) { - int dimensions = 0; + type = Definition.ObjectClassTodefClass(type); Class componentType = type; + while (componentType.isArray()) { - dimensions++; componentType = componentType.getComponentType(); } - Definition.Struct struct; - if (componentType == Object.class) { - struct = definition.getType("def").struct; - } else { - if (definition.RuntimeClassToStruct(componentType) == null) { - throw new IllegalArgumentException(unknownErrorMessageSource.apply(componentType)); - } - struct = definition.RuntimeClassToStruct(componentType); + + if (definition.getPainlessStructFromJavaClass(componentType) == null) { + throw new IllegalArgumentException(unknownErrorMessageSource.apply(componentType)); } - return Definition.TypeToClass(definition.getType(struct, dimensions)); + + return type; } private static String[] readArgumentNamesConstant(Class iface) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java index cf24a47386603..add3aaabe51e0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java @@ -74,8 +74,8 @@ public void recover(final LexerNoViableAltException lnvae) { } @Override - protected boolean isSimpleType(String name) { - return definition.isSimpleType(name); + protected boolean isType(String name) { + return definition.isSimplePainlessType(name); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java index dd62701b86e4d..7fa10f6e9fbf2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessLexer.java @@ -1,9 +1,16 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.painless.antlr; -import org.antlr.v4.runtime.Lexer; + import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.atn.*; +import org.antlr.v4.runtime.Lexer; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.LexerATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) @@ -14,16 +21,16 @@ abstract class PainlessLexer extends Lexer { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, - FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, - THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, 
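The rewritten definitionTypeForClass above no longer counts dimensions; it simply unwinds component types until it reaches the element class whose whitelisting must be checked. The unwinding loop in isolation (hypothetical class name):

public final class ComponentTypeSketch {
    public static void main(String[] args) {
        Class<?> component = String[][].class;
        while (component.isArray()) {
            component = component.getComponentType(); // strip one dimension per step
        }
        System.out.println(component); // class java.lang.String
    }
}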
MUL=30, DIV=31, REM=32, - ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, - EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, - COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, - DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, - AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, - DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, TYPE=81, + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, + FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, + THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, + ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, + EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, + COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, + DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, + AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, + DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, TYPE=81, ID=82, DOTINTEGER=83, DOTID=84; public static final int AFTER_DOT = 1; public static String[] modeNames = { @@ -31,39 +38,39 @@ abstract class PainlessLexer extends Lexer { }; public static final String[] ruleNames = { - "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", - "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", - "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", - "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", - "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", - "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", "REF", "ARROW", - "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", - "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", - "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", + "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", "DOT", + "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", "FOR", + "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", "THIS", + "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", "SUB", "LSH", + "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", "NER", "BWAND", + "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", "REF", "ARROW", + "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", "AMUL", "ADIV", + "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", "OCTAL", "HEX", + "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", "DOTID" }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", - "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", - "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", - "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", - "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", - "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", - "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", - "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, + null, null, null, "'{'", 
"'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", + "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", + "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", + "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", + "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", + "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", + "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", + "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", - "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", - "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", - "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", - "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", - "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", - "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", + "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", + "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", + "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", + "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", + "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", + "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -106,7 +113,7 @@ public Vocabulary getVocabulary() { * See also * The lexer hack. 
*/ - protected abstract boolean isSimpleType(String name); + protected abstract boolean isType(String name); /** * Is the preceding {@code /} a the beginning of a regex (true) or a division @@ -164,7 +171,7 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { private boolean TYPE_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 2: - return isSimpleType(getText()) ; + return isType(getText()) ; } return true; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java index 9cd3334aa51da..bef57d22e9ea9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/PainlessParser.java @@ -1,9 +1,24 @@ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.painless.antlr; -import org.antlr.v4.runtime.atn.*; + +import org.antlr.v4.runtime.FailedPredicateException; +import org.antlr.v4.runtime.NoViableAltException; +import org.antlr.v4.runtime.Parser; +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.RuleContext; +import org.antlr.v4.runtime.RuntimeMetaData; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.Vocabulary; +import org.antlr.v4.runtime.VocabularyImpl; +import org.antlr.v4.runtime.atn.ATN; +import org.antlr.v4.runtime.atn.ATNDeserializer; +import org.antlr.v4.runtime.atn.ParserATNSimulator; +import org.antlr.v4.runtime.atn.PredictionContextCache; import org.antlr.v4.runtime.dfa.DFA; -import org.antlr.v4.runtime.*; -import org.antlr.v4.runtime.tree.*; +import org.antlr.v4.runtime.tree.ParseTreeVisitor; +import org.antlr.v4.runtime.tree.TerminalNode; + import java.util.List; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) @@ -14,57 +29,57 @@ class PainlessParser extends Parser { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, - NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, - FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, - THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, - ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, - EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, - COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, - DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, - AXOR=67, AOR=68, ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, - DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, TYPE=81, + WS=1, COMMENT=2, LBRACK=3, RBRACK=4, LBRACE=5, RBRACE=6, LP=7, RP=8, DOT=9, + NSDOT=10, COMMA=11, SEMICOLON=12, IF=13, IN=14, ELSE=15, WHILE=16, DO=17, + FOR=18, CONTINUE=19, BREAK=20, RETURN=21, NEW=22, TRY=23, CATCH=24, THROW=25, + THIS=26, INSTANCEOF=27, BOOLNOT=28, BWNOT=29, MUL=30, DIV=31, REM=32, + ADD=33, SUB=34, LSH=35, RSH=36, USH=37, LT=38, LTE=39, GT=40, GTE=41, + EQ=42, EQR=43, NE=44, NER=45, BWAND=46, XOR=47, BWOR=48, BOOLAND=49, BOOLOR=50, + COND=51, COLON=52, ELVIS=53, REF=54, ARROW=55, FIND=56, MATCH=57, INCR=58, + DECR=59, ASSIGN=60, AADD=61, ASUB=62, AMUL=63, ADIV=64, AREM=65, AAND=66, + AXOR=67, AOR=68, 
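The isSimpleType-to-isType rename above is cosmetic; the semantic predicate still implements the classic lexer hack, deciding whether identifier-shaped text is a TYPE or an ID by consulting the set of known type names. A toy assumed version of that decision:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public final class LexerHackSketch {
    static final Set<String> KNOWN_TYPES = new HashSet<>(Arrays.asList("int", "def", "String", "Map"));

    static String tokenKind(String text) {
        return KNOWN_TYPES.contains(text) ? "TYPE" : "ID"; // the predicate's whole job
    }

    public static void main(String[] args) {
        System.out.println(tokenKind("String")); // TYPE
        System.out.println(tokenKind("foo"));    // ID
    }
}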
ALSH=69, ARSH=70, AUSH=71, OCTAL=72, HEX=73, INTEGER=74, + DECIMAL=75, STRING=76, REGEX=77, TRUE=78, FALSE=79, NULL=80, TYPE=81, ID=82, DOTINTEGER=83, DOTID=84; public static final int - RULE_source = 0, RULE_function = 1, RULE_parameters = 2, RULE_statement = 3, - RULE_rstatement = 4, RULE_dstatement = 5, RULE_trailer = 6, RULE_block = 7, - RULE_empty = 8, RULE_initializer = 9, RULE_afterthought = 10, RULE_declaration = 11, - RULE_decltype = 12, RULE_declvar = 13, RULE_trap = 14, RULE_expression = 15, - RULE_unary = 16, RULE_chain = 17, RULE_primary = 18, RULE_postfix = 19, - RULE_postdot = 20, RULE_callinvoke = 21, RULE_fieldaccess = 22, RULE_braceaccess = 23, - RULE_arrayinitializer = 24, RULE_listinitializer = 25, RULE_mapinitializer = 26, - RULE_maptoken = 27, RULE_arguments = 28, RULE_argument = 29, RULE_lambda = 30, + RULE_source = 0, RULE_function = 1, RULE_parameters = 2, RULE_statement = 3, + RULE_rstatement = 4, RULE_dstatement = 5, RULE_trailer = 6, RULE_block = 7, + RULE_empty = 8, RULE_initializer = 9, RULE_afterthought = 10, RULE_declaration = 11, + RULE_decltype = 12, RULE_declvar = 13, RULE_trap = 14, RULE_expression = 15, + RULE_unary = 16, RULE_chain = 17, RULE_primary = 18, RULE_postfix = 19, + RULE_postdot = 20, RULE_callinvoke = 21, RULE_fieldaccess = 22, RULE_braceaccess = 23, + RULE_arrayinitializer = 24, RULE_listinitializer = 25, RULE_mapinitializer = 26, + RULE_maptoken = 27, RULE_arguments = 28, RULE_argument = 29, RULE_lambda = 30, RULE_lamtype = 31, RULE_funcref = 32; public static final String[] ruleNames = { - "source", "function", "parameters", "statement", "rstatement", "dstatement", - "trailer", "block", "empty", "initializer", "afterthought", "declaration", - "decltype", "declvar", "trap", "expression", "unary", "chain", "primary", - "postfix", "postdot", "callinvoke", "fieldaccess", "braceaccess", "arrayinitializer", - "listinitializer", "mapinitializer", "maptoken", "arguments", "argument", + "source", "function", "parameters", "statement", "rstatement", "dstatement", + "trailer", "block", "empty", "initializer", "afterthought", "declaration", + "decltype", "declvar", "trap", "expression", "unary", "chain", "primary", + "postfix", "postdot", "callinvoke", "fieldaccess", "braceaccess", "arrayinitializer", + "listinitializer", "mapinitializer", "maptoken", "arguments", "argument", "lambda", "lamtype", "funcref" }; private static final String[] _LITERAL_NAMES = { - null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", - "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", - "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", - "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", - "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", - "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", - "'->'", "'=~'", "'==~'", "'++'", "'--'", "'='", "'+='", "'-='", "'*='", - "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, + null, null, null, "'{'", "'}'", "'['", "']'", "'('", "')'", "'.'", "'?.'", + "','", "';'", "'if'", "'in'", "'else'", "'while'", "'do'", "'for'", "'continue'", + "'break'", "'return'", "'new'", "'try'", "'catch'", "'throw'", "'this'", + "'instanceof'", "'!'", "'~'", "'*'", "'/'", "'%'", "'+'", "'-'", "'<<'", + "'>>'", "'>>>'", "'<'", "'<='", "'>'", "'>='", "'=='", "'==='", "'!='", + "'!=='", "'&'", "'^'", "'|'", "'&&'", "'||'", "'?'", "':'", "'?:'", "'::'", + "'->'", "'=~'", "'==~'", "'++'", 
"'--'", "'='", "'+='", "'-='", "'*='", + "'/='", "'%='", "'&='", "'^='", "'|='", "'<<='", "'>>='", "'>>>='", null, null, null, null, null, null, "'true'", "'false'", "'null'" }; private static final String[] _SYMBOLIC_NAMES = { - null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", - "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", - "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", - "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", - "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", - "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", - "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", - "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", - "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", + null, "WS", "COMMENT", "LBRACK", "RBRACK", "LBRACE", "RBRACE", "LP", "RP", + "DOT", "NSDOT", "COMMA", "SEMICOLON", "IF", "IN", "ELSE", "WHILE", "DO", + "FOR", "CONTINUE", "BREAK", "RETURN", "NEW", "TRY", "CATCH", "THROW", + "THIS", "INSTANCEOF", "BOOLNOT", "BWNOT", "MUL", "DIV", "REM", "ADD", + "SUB", "LSH", "RSH", "USH", "LT", "LTE", "GT", "GTE", "EQ", "EQR", "NE", + "NER", "BWAND", "XOR", "BWOR", "BOOLAND", "BOOLOR", "COND", "COLON", "ELVIS", + "REF", "ARROW", "FIND", "MATCH", "INCR", "DECR", "ASSIGN", "AADD", "ASUB", + "AMUL", "ADIV", "AREM", "AAND", "AXOR", "AOR", "ALSH", "ARSH", "AUSH", + "OCTAL", "HEX", "INTEGER", "DECIMAL", "STRING", "REGEX", "TRUE", "FALSE", "NULL", "TYPE", "ID", "DOTINTEGER", "DOTID" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -162,7 +177,7 @@ public final SourceContext source() throws RecognitionException { setState(66); function(); } - } + } } setState(71); _errHandler.sync(this); @@ -178,7 +193,7 @@ public final SourceContext source() throws RecognitionException { setState(72); statement(); } - } + } } setState(77); _errHandler.sync(this); @@ -426,7 +441,7 @@ public RstatementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_rstatement; } - + public RstatementContext() { } public void copyFrom(RstatementContext ctx) { super.copyFrom(ctx); @@ -805,7 +820,7 @@ public final RstatementContext rstatement() throws RecognitionException { match(TRY); setState(164); block(); - setState(166); + setState(166); _errHandler.sync(this); _alt = 1; do { @@ -821,7 +836,7 @@ public final RstatementContext rstatement() throws RecognitionException { default: throw new NoViableAltException(this); } - setState(168); + setState(168); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,12,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -845,7 +860,7 @@ public DstatementContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dstatement; } - + public DstatementContext() { } public void copyFrom(DstatementContext ctx) { super.copyFrom(ctx); @@ -1148,7 +1163,7 @@ public final BlockContext block() throws RecognitionException { setState(194); statement(); } - } + } } setState(199); _errHandler.sync(this); @@ -1407,7 +1422,7 @@ public final DecltypeContext decltype() throws RecognitionException { setState(224); match(RBRACE); } - } + } } setState(229); _errHandler.sync(this); @@ -1532,7 +1547,7 @@ public 
ExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_expression; } - + public ExpressionContext() { } public void copyFrom(ExpressionContext ctx) { super.copyFrom(ctx); @@ -1943,7 +1958,7 @@ private ExpressionContext expression(int _p) throws RecognitionException { } break; } - } + } } setState(297); _errHandler.sync(this); @@ -1967,7 +1982,7 @@ public UnaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_unary; } - + public UnaryContext() { } public void copyFrom(UnaryContext ctx) { super.copyFrom(ctx); @@ -2135,7 +2150,7 @@ public ChainContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_chain; } - + public ChainContext() { } public void copyFrom(ChainContext ctx) { super.copyFrom(ctx); @@ -2214,7 +2229,7 @@ public final ChainContext chain() throws RecognitionException { setState(314); postfix(); } - } + } } setState(319); _errHandler.sync(this); @@ -2240,7 +2255,7 @@ public final ChainContext chain() throws RecognitionException { setState(322); postfix(); } - } + } } setState(327); _errHandler.sync(this); @@ -2274,7 +2289,7 @@ public PrimaryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primary; } - + public PrimaryContext() { } public void copyFrom(PrimaryContext ctx) { super.copyFrom(ctx); @@ -2799,7 +2814,7 @@ public ArrayinitializerContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_arrayinitializer; } - + public ArrayinitializerContext() { } public void copyFrom(ArrayinitializerContext ctx) { super.copyFrom(ctx); @@ -2886,7 +2901,7 @@ public final ArrayinitializerContext arrayinitializer() throws RecognitionExcept match(NEW); setState(372); match(TYPE); - setState(377); + setState(377); _errHandler.sync(this); _alt = 1; do { @@ -2906,7 +2921,7 @@ public final ArrayinitializerContext arrayinitializer() throws RecognitionExcept default: throw new NoViableAltException(this); } - setState(379); + setState(379); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,31,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -2927,7 +2942,7 @@ public final ArrayinitializerContext arrayinitializer() throws RecognitionExcept setState(382); postfix(); } - } + } } setState(387); _errHandler.sync(this); @@ -2989,7 +3004,7 @@ public final ArrayinitializerContext arrayinitializer() throws RecognitionExcept setState(406); postfix(); } - } + } } setState(411); _errHandler.sync(this); @@ -3542,7 +3557,7 @@ public FuncrefContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_funcref; } - + public FuncrefContext() { } public void copyFrom(FuncrefContext ctx) { super.copyFrom(ctx); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 3ac6cb7fd37c4..a481c99a99d12 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -986,19 +986,20 @@ public AExpression 
visitBraceaccess(BraceaccessContext ctx, AExpression prefix) @Override public ANode visitNewstandardarray(NewstandardarrayContext ctx) { - String type = ctx.TYPE().getText(); + StringBuilder type = new StringBuilder(ctx.TYPE().getText()); List expressions = new ArrayList<>(); for (ExpressionContext expression : ctx.expression()) { + type.append("[]"); expressions.add((AExpression)visit(expression)); } - return buildPostfixChain(new ENewArray(location(ctx), type, expressions, false), ctx.postdot(), ctx.postfix()); + return buildPostfixChain(new ENewArray(location(ctx), type.toString(), expressions, false), ctx.postdot(), ctx.postfix()); } @Override public ANode visitNewinitializedarray(NewinitializedarrayContext ctx) { - String type = ctx.TYPE().getText(); + String type = ctx.TYPE().getText() + "[]"; List expressions = new ArrayList<>(); for (ExpressionContext expression : ctx.expression()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index eaf8045bf1c65..5a897e04a8d98 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -51,7 +50,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { try { - actual = Definition.TypeToClass(locals.getDefinition().getType(type)); + actual = locals.getDefinition().getJavaClassFromPainlessType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index c82b1003a55f1..21bef9aa2ed5d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -66,7 +66,7 @@ void analyze(Locals locals) { try { if ("this".equals(type)) { // user's own function - Method interfaceMethod = locals.getDefinition().ClassToType(expected).struct.functionalMethod; + Method interfaceMethod = locals.getDefinition().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + "to [" + Definition.ClassToName(expected) + "], not a functional interface"); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 54403b51f04bd..5296d79e214ed 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -58,7 +58,7 @@ void analyze(Locals locals) { // ensure the specified type is part of the definition try { - clazz = Definition.TypeToClass(locals.getDefinition().getType(this.type)); + clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException 
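The Walker change above makes array-ness explicit in the type string itself: one "[]" is appended per dimension, so new int[2][3] now reaches ENewArray as the Painless type "int[][]" rather than a bare element name plus a separate dimension count. A tiny assumed illustration:

public final class ArrayTypeNameSketch {
    public static void main(String[] args) {
        StringBuilder type = new StringBuilder("int");
        int dimensions = 2;               // e.g. one per bracketed expression in the parse tree
        for (int i = 0; i < dimensions; ++i) {
            type.append("[]");
        }
        System.out.println(type);         // int[][]
    }
}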
exception) {
             throw createError(new IllegalArgumentException("Not a type [" + this.type + "]."));
         }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
index a7213e75ca485..e40d21ab110ab 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
@@ -120,7 +120,7 @@ void analyze(Locals locals) {
             }
         } else {
             // we know the method statically, infer return type and any unknown/def types
-            interfaceMethod = locals.getDefinition().ClassToType(expected).struct.functionalMethod;
+            interfaceMethod = locals.getDefinition().getPainlessStructFromJavaClass(expected).functionalMethod;
             if (interfaceMethod == null) {
                 throw createError(new IllegalArgumentException("Cannot pass lambda to [" + Definition.ClassToName(expected) + "], not a functional interface"));
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
index d957be0aadb50..05b10796cb4f9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
@@ -61,13 +61,13 @@ void analyze(Locals locals) {
 
         actual = ArrayList.class;
 
-        constructor = locals.getDefinition().ClassToType(actual).struct.constructors.get(new MethodKey("<init>", 0));
+        constructor = locals.getDefinition().getPainlessStructFromJavaClass(actual).constructors.get(new MethodKey("<init>", 0));
 
         if (constructor == null) {
             throw createError(new IllegalStateException("Illegal tree structure."));
         }
 
-        method = locals.getDefinition().ClassToType(actual).struct.methods.get(new MethodKey("add", 1));
+        method = locals.getDefinition().getPainlessStructFromJavaClass(actual).methods.get(new MethodKey("add", 1));
 
         if (method == null) {
             throw createError(new IllegalStateException("Illegal tree structure."));
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
index 2cd864da24b65..f5763042b8191 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
@@ -67,13 +67,13 @@ void analyze(Locals locals) {
 
         actual = HashMap.class;
 
-        constructor = locals.getDefinition().ClassToType(actual).struct.constructors.get(new MethodKey("<init>", 0));
+        constructor = locals.getDefinition().getPainlessStructFromJavaClass(actual).constructors.get(new MethodKey("<init>", 0));
 
         if (constructor == null) {
             throw createError(new IllegalStateException("Illegal tree structure."));
         }
 
-        method = locals.getDefinition().ClassToType(actual).struct.methods.get(new MethodKey("put", 2));
+        method = locals.getDefinition().getPainlessStructFromJavaClass(actual).methods.get(new MethodKey("put", 2));
 
         if (method == null) {
             throw createError(new IllegalStateException("Illegal tree structure."));
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java
index c1d58cb2f2ad9..1a0a718ae7fc8 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java
+++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -38,8 +37,6 @@ public final class ENewArray extends AExpression { private final List arguments; private final boolean initialize; - private Class array; - public ENewArray(Location location, String type, List arguments, boolean initialize) { super(location); @@ -64,7 +61,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = Definition.TypeToClass(locals.getDefinition().getType(this.type)); + clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } @@ -72,15 +69,13 @@ void analyze(Locals locals) { for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); - expression.expected = initialize ? clazz : int.class; + expression.expected = initialize ? clazz.getComponentType() : int.class; expression.internal = true; expression.analyze(locals); arguments.set(argument, expression.cast(locals)); } - actual = Definition.TypeToClass(locals.getDefinition().getType( - locals.getDefinition().ClassToType(clazz).struct, initialize ? 1 : arguments.size())); - array = Definition.defClassToObjectClass(actual); + actual = clazz; } @Override @@ -89,7 +84,7 @@ void write(MethodWriter writer, Globals globals) { if (initialize) { writer.push(arguments.size()); - writer.newArray(MethodWriter.getType(array.getComponentType())); + writer.newArray(MethodWriter.getType(actual.getComponentType())); for (int index = 0; index < arguments.size(); ++index) { AExpression argument = arguments.get(index); @@ -97,7 +92,7 @@ void write(MethodWriter writer, Globals globals) { writer.dup(); writer.push(index); argument.write(writer, globals); - writer.arrayStore(MethodWriter.getType(array.getComponentType())); + writer.arrayStore(MethodWriter.getType(actual.getComponentType())); } } else { for (AExpression argument : arguments) { @@ -105,9 +100,9 @@ void write(MethodWriter writer, Globals globals) { } if (arguments.size() > 1) { - writer.visitMultiANewArrayInsn(MethodWriter.getType(array).getDescriptor(), arguments.size()); + writer.visitMultiANewArrayInsn(MethodWriter.getType(actual).getDescriptor(), arguments.size()); } else { - writer.newArray(MethodWriter.getType(array.getComponentType())); + writer.newArray(MethodWriter.getType(actual.getComponentType())); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 2a96d68bcb417..e3a926ef2244b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -58,12 +58,12 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { try { - actual = Definition.TypeToClass(locals.getDefinition().getType(this.type)); + actual = locals.getDefinition().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } - Struct struct = 
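// The ENewArray rewrite above no longer keeps a separate 'array' field: 'actual'
// is already the resolved array class, so its component type drives both the
// expected element type and the opcode choice. A hedged sketch of that choice,
// assuming the node's MethodWriter 'writer' and a dimension count 'dims':
if (initialize || dims == 1) {
    // single-dimension (or initializer) arrays: push the length, then one newarray
    writer.newArray(MethodWriter.getType(actual.getComponentType()));
} else {
    // multi-dimension: push each length, then one multianewarray
    writer.visitMultiANewArrayInsn(MethodWriter.getType(actual).getDescriptor(), dims);
}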
locals.getDefinition().ClassToType(actual).struct; + Struct struct = locals.getDefinition().getPainlessStructFromJavaClass(actual); constructor = struct.constructors.get(new Definition.MethodKey("", arguments.size())); if (constructor != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java index 5c3b4cadf6ee9..fa249b9df6237 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java @@ -72,7 +72,7 @@ void analyze(Locals locals) { } constant = new Constant( - location, locals.getDefinition().PatternType.type, "regexAt$" + location.getOffset(), this::initializeConstant); + location, MethodWriter.getType(Pattern.class), "regexAt$" + location.getOffset(), this::initializeConstant); actual = Pattern.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index f5c2c6e9da354..5ebf30f5781cf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -49,7 +48,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { try { - actual = Definition.TypeToClass(locals.getDefinition().getType(type)); + actual = locals.getDefinition().getJavaClassFromPainlessType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index 6712eccd914c5..0e2ab70897fe5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -63,9 +63,9 @@ void analyze(Locals locals) { } else if (prefix.actual == def.class) { sub = new PSubDefArray(location, index); } else if (Map.class.isAssignableFrom(prefix.actual)) { - sub = new PSubMapShortcut(location, locals.getDefinition().ClassToType(prefix.actual).struct, index); + sub = new PSubMapShortcut(location, locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual), index); } else if (List.class.isAssignableFrom(prefix.actual)) { - sub = new PSubListShortcut(location, locals.getDefinition().ClassToType(prefix.actual).struct, index); + sub = new PSubListShortcut(location, locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual), index); } else { throw createError( new IllegalArgumentException("Illegal array access on type [" + Definition.ClassToName(prefix.actual) + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 498fb83239395..6fff5a8e93f3e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -71,10 +71,10 @@ void analyze(Locals locals) { throw createError(new IllegalArgumentException("Illegal call [" + name + "] on array type.")); } - Struct struct = locals.getDefinition().ClassToType(prefix.actual).struct; + Struct struct = locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual); if (prefix.actual.isPrimitive()) { - struct = locals.getDefinition().ClassToType(Definition.getBoxedType(prefix.actual)).struct; + struct = locals.getDefinition().getPainlessStructFromJavaClass(Definition.getBoxedType(prefix.actual)); } MethodKey methodKey = new MethodKey(name, arguments.size()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 1f492758af618..de2c05dfa9b28 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -67,7 +67,7 @@ void analyze(Locals locals) { } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { - Struct struct = locals.getDefinition().ClassToType(prefix.actual).struct; + Struct struct = locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual); Field field = prefix instanceof EStatic ? struct.staticMembers.get(value) : struct.members.get(value); if (field != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java index d98c2f2276eaa..6428e47d1bacc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; - import org.elasticsearch.painless.Definition.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 535ad5235b07c..98e45ca29f416 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -68,7 +67,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = Definition.TypeToClass(locals.getDefinition().getType(this.type)); + clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index f00db583ceae4..9f3f86abf438b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -63,7 +62,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = Definition.TypeToClass(locals.getDefinition().getType(this.type)); + clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index 04de0c0696e96..a3c8319825a26 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -71,7 +71,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = Definition.TypeToClass(locals.getDefinition().getType(this.type)); + clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 5fa62f27e94dc..1b1e6bd2ef84b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -119,7 +119,7 @@ void extractVariables(Set variables) { void generateSignature(Definition definition) { try { - rtnType = Definition.TypeToClass(definition.getType(rtnTypeStr)); + rtnType = definition.getJavaClassFromPainlessType(rtnTypeStr); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "].")); } @@ -133,7 +133,7 @@ void generateSignature(Definition definition) { for (int param = 0; param < this.paramTypeStrs.size(); ++param) { try { - Class paramType = Definition.TypeToClass(definition.getType(this.paramTypeStrs.get(param))); + Class paramType = definition.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); paramClasses[param] = Definition.defClassToObjectClass(paramType); paramTypes.add(paramType); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index ca30d641e7468..11e0f15d7e4f8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -25,7 +25,6 @@ import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Definition.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -78,12 +77,11 @@ void analyze(Locals locals) { if (expression.actual == def.class) { method = null; } 
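// generateSignature above resolves every painless type name to a Java class and
// widens def to Object for the JVM-visible signature. A short sketch using only
// the calls this diff introduces:
Class<?> rtnType = definition.getJavaClassFromPainlessType("int");   // plain types resolve directly
Class<?> paramType = definition.getJavaClassFromPainlessType("def"); // def resolves to its marker class
Class<?> jvmType = Definition.defClassToObjectClass(paramType);      // def widens to Object.class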
else { - Type actualType = locals.getDefinition().ClassToType(expression.actual); - method = actualType.struct.methods.get(new MethodKey("iterator", 0)); + method = locals.getDefinition().getPainlessStructFromJavaClass(expression.actual).methods.get(new MethodKey("iterator", 0)); if (method == null) { throw createError(new IllegalArgumentException( - "Unable to create iterator for the type [" + actualType.name + "].")); + "Unable to create iterator for the type [" + Definition.ClassToName(expression.actual) + "].")); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index 2b6af6982ead0..291a4807606ea 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -1,5 +1,3 @@ -package org.elasticsearch.painless; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,6 +17,8 @@ * under the License. */ +package org.elasticsearch.painless; + import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index ed38f4c511f59..309b6be97f20b 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -26,9 +26,6 @@ import org.elasticsearch.painless.Definition.Field; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.Struct; -import org.elasticsearch.painless.Definition.Type; -import org.elasticsearch.painless.spi.Whitelist; - import java.io.IOException; import java.io.PrintStream; import java.lang.reflect.Modifier; @@ -44,12 +41,14 @@ import static java.util.Comparator.comparing; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.painless.spi.Whitelist.BASE_WHITELISTS; /** * Generates an API reference from the method and type whitelists in {@link Definition}. 
*/ public class PainlessDocGenerator { - private static final Definition definition = new Definition(Whitelist.BASE_WHITELISTS); + + private static final Definition definition = new Definition(BASE_WHITELISTS); private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class); private static final Comparator FIELD_NAME = comparing(f -> f.name); private static final Comparator METHOD_NAME = comparing(m -> m.name); @@ -68,41 +67,41 @@ public static void main(String[] args) throws IOException { Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); - List types = definition.allSimpleTypes().stream().sorted(comparing(t -> t.name)).collect(toList()); - for (Type type : types) { - if (type.clazz.isPrimitive()) { + List structs = definition.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); + for (Struct struct : structs) { + if (struct.clazz.isPrimitive()) { // Primitives don't have methods to reference continue; } - if ("def".equals(type.name)) { + if ("def".equals(struct.name)) { // def is special but doesn't have any methods all of its own. continue; } indexStream.print("include::"); - indexStream.print(type.struct.name); + indexStream.print(struct.name); indexStream.println(".asciidoc[]"); - Path typePath = apiRootPath.resolve(type.struct.name + ".asciidoc"); - logger.info("Writing [{}.asciidoc]", type.name); + Path typePath = apiRootPath.resolve(struct.name + ".asciidoc"); + logger.info("Writing [{}.asciidoc]", struct.name); try (PrintStream typeStream = new PrintStream( Files.newOutputStream(typePath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(typeStream); typeStream.print("[["); - emitAnchor(typeStream, type.struct); + emitAnchor(typeStream, struct); typeStream.print("]]++"); - typeStream.print(type.name); + typeStream.print(struct.name); typeStream.println("++::"); Consumer documentField = field -> PainlessDocGenerator.documentField(typeStream, field); Consumer documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method); - type.struct.staticMembers.values().stream().sorted(FIELD_NAME).forEach(documentField); - type.struct.members.values().stream().sorted(FIELD_NAME).forEach(documentField); - type.struct.staticMethods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(documentMethod); - type.struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); + struct.staticMembers.values().stream().sorted(FIELD_NAME).forEach(documentField); + struct.members.values().stream().sorted(FIELD_NAME).forEach(documentField); + struct.staticMethods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(documentMethod); + struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); Map inherited = new TreeMap<>(); - type.struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { - if (method.owner == type.struct) { + struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { + if (method.owner == struct) { documentMethod(typeStream, method); } else { inherited.put(method.owner.name, method.owner); @@ -139,7 +138,7 @@ private static void documentField(PrintStream stream, Field field) { stream.print("static "); } - emitType(stream, definition.ClassToType(field.clazz)); + 
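// The generator above now iterates Structs directly: every non-primitive,
// non-def struct gets its own asciidoc page plus an include line in the index.
// Condensed shape of that loop:
for (Struct struct : structs) {
    if (struct.clazz.isPrimitive() || "def".equals(struct.name)) {
        continue; // primitives have no methods; def has no page of its own
    }
    indexStream.println("include::" + struct.name + ".asciidoc[]");
    // ...then emit the struct's own .asciidoc with fields, constructors and methods
}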
emitType(stream, field.clazz);
         stream.print(' ');
 
         String javadocRoot = javadocRoot(field);
@@ -170,7 +169,7 @@ private static void documentMethod(PrintStream stream, Method method) {
         }
 
         if (false == method.name.equals("<init>")) {
-            emitType(stream, definition.ClassToType(method.rtn));
+            emitType(stream, method.rtn);
             stream.print(' ');
         }
 
@@ -188,7 +187,7 @@ private static void documentMethod(PrintStream stream, Method method) {
             } else {
                 stream.print(", ");
             }
-            emitType(stream, definition.ClassToType(arg));
+            emitType(stream, arg);
         }
         stream.print(")++");
 
@@ -234,19 +233,19 @@ private static String methodName(Method method) {
     }
 
     /**
-     * Emit a {@link Type}. If the type is primitive or an array of primitives this just emits the name of the type. Otherwise this emits an
-     * internal link with the text.
+     * Emit a {@link Class}. If the type is primitive or an array of primitives this just emits the name of the type. Otherwise this emits
+     * an internal link with the text.
      */
-    private static void emitType(PrintStream stream, Type type) {
-        emitStruct(stream, type.struct);
-        for (int i = 0; i < type.dimensions; i++) {
+    private static void emitType(PrintStream stream, Class<?> clazz) {
+        emitStruct(stream, definition.getPainlessStructFromJavaClass(clazz));
+        while ((clazz = clazz.getComponentType()) != null) {
             stream.print("[]");
         }
     }
 
     /**
-     * Emit a {@link Struct}. If the {@linkplain Struct} is primitive or def this just emits the name of the struct. Otherwise this emits an
-     * internal link with the name.
+     * Emit a {@link Struct}. If the {@linkplain Struct} is primitive or def this just emits the name of the struct. Otherwise this emits
+     * an internal link with the name.
      */
     private static void emitStruct(PrintStream stream, Struct struct) {
         if (false == struct.clazz.isPrimitive() && false == struct.name.equals("def")) {
@@ -279,14 +278,13 @@ private static void emitJavadocLink(PrintStream stream, String root, Method meth
             stream.print(method.owner.clazz.getName());
         }
         for (Class<?> clazz : method.arguments) {
-            Type arg = definition.ClassToType(clazz);
             if (first) {
                 first = false;
             } else {
                 stream.print("%2D");
             }
-            stream.print(arg.struct.clazz.getName());
-            if (arg.dimensions > 0) {
+            stream.print(clazz.getName());
+            if (clazz.isArray()) {
                 stream.print(":A");
             }
         }
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java
new file mode 100644
index 0000000000000..66d49be16ba9a
--- /dev/null
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless;
+
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Scorer;
+import org.elasticsearch.painless.spi.Whitelist;
+import org.elasticsearch.script.ScriptedMetricAggContexts;
+import org.elasticsearch.script.ScriptContext;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class ScriptedMetricAggContextsTests extends ScriptTestCase {
+    @Override
+    protected Map<ScriptContext<?>, List<Whitelist>> scriptContexts() {
+        Map<ScriptContext<?>, List<Whitelist>> contexts = new HashMap<>();
+        contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, Whitelist.BASE_WHITELISTS);
+        contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, Whitelist.BASE_WHITELISTS);
+        contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, Whitelist.BASE_WHITELISTS);
+        contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, Whitelist.BASE_WHITELISTS);
+        return contexts;
+    }
+
+    public void testInitBasic() {
+        ScriptedMetricAggContexts.InitScript.Factory factory = scriptEngine.compile("test",
+            "state.testField = params.initialVal", ScriptedMetricAggContexts.InitScript.CONTEXT, Collections.emptyMap());
+
+        Map<String, Object> params = new HashMap<>();
+        Map<String, Object> state = new HashMap<>();
+
+        params.put("initialVal", 10);
+
+        ScriptedMetricAggContexts.InitScript script = factory.newInstance(params, state);
+        script.execute();
+
+        assert(state.containsKey("testField"));
+        assertEquals(10, state.get("testField"));
+    }
+
+    public void testMapBasic() {
+        ScriptedMetricAggContexts.MapScript.Factory factory = scriptEngine.compile("test",
+            "state.testField = 2*_score", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap());
+
+        Map<String, Object> params = new HashMap<>();
+        Map<String, Object> state = new HashMap<>();
+
+        Scorer scorer = new Scorer(null) {
+            @Override
+            public int docID() { return 0; }
+
+            @Override
+            public float score() { return 0.5f; }
+
+            @Override
+            public DocIdSetIterator iterator() { return null; }
+        };
+
+        ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null);
+        ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(null);
+
+        script.setScorer(scorer);
+        script.execute();
+
+        assert(state.containsKey("testField"));
+        assertEquals(1.0, state.get("testField"));
+    }
+
+    public void testCombineBasic() {
+        ScriptedMetricAggContexts.CombineScript.Factory factory = scriptEngine.compile("test",
+            "state.testField = params.initialVal; return state.testField + params.inc", ScriptedMetricAggContexts.CombineScript.CONTEXT,
+            Collections.emptyMap());
+
+        Map<String, Object> params = new HashMap<>();
+        Map<String, Object> state = new HashMap<>();
+
+        params.put("initialVal", 10);
+        params.put("inc", 2);
+
+        ScriptedMetricAggContexts.CombineScript script = factory.newInstance(params, state);
+        Object res = script.execute();
+
+        assert(state.containsKey("testField"));
+        assertEquals(10, state.get("testField"));
+        assertEquals(12, res);
+    }
+
+    public void testReduceBasic() {
+        ScriptedMetricAggContexts.ReduceScript.Factory factory = scriptEngine.compile("test",
+            "states[0].testField + states[1].testField", ScriptedMetricAggContexts.ReduceScript.CONTEXT, Collections.emptyMap());
+
+        Map<String, Object> params = new HashMap<>();
+        List<Object> states = new ArrayList<>();
+
+        Map<String, Object> state1 = new HashMap<>(), state2 = new HashMap<>();
+        state1.put("testField", 1);
+        state2.put("testField", 2);
+
+        states.add(state1);
+        states.add(state2);
+
+        ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, states);
+
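// All four agg script contexts follow the same two-step pattern these tests
// exercise: compile once per script, then bind params and mutable state per use.
// A condensed sketch (CombineScript shown; the other contexts differ only in
// their factory signatures):
ScriptedMetricAggContexts.CombineScript.Factory combineFactory = scriptEngine.compile(
    "sketch", "return state.size()", ScriptedMetricAggContexts.CombineScript.CONTEXT, Collections.emptyMap());
ScriptedMetricAggContexts.CombineScript combine = combineFactory.newInstance(new HashMap<>(), new HashMap<>());
Object combined = combine.execute(); // init/map mutate state; combine/reduce return a value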
Object res = script.execute(); + assertEquals(3, res); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index 6dbe480d4b5a3..fd8190aa2c2eb 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -284,12 +284,12 @@ public void testEMapInit() { } public void testENewArray() { - assertToString("(SSource (SReturn (ENewArray int dims (Args (ENumeric 10)))))", "return new int[10]"); - assertToString("(SSource (SReturn (ENewArray int dims (Args (ENumeric 10) (ENumeric 4) (ENumeric 5)))))", + assertToString("(SSource (SReturn (ENewArray int[] dims (Args (ENumeric 10)))))", "return new int[10]"); + assertToString("(SSource (SReturn (ENewArray int[][][] dims (Args (ENumeric 10) (ENumeric 4) (ENumeric 5)))))", "return new int[10][4][5]"); - assertToString("(SSource (SReturn (ENewArray int init (Args (ENumeric 1) (ENumeric 2) (ENumeric 3)))))", + assertToString("(SSource (SReturn (ENewArray int[] init (Args (ENumeric 1) (ENumeric 2) (ENumeric 3)))))", "return new int[] {1, 2, 3}"); - assertToString("(SSource (SReturn (ENewArray def init (Args (ENumeric 1) (ENumeric 2) (EString 'bird')))))", + assertToString("(SSource (SReturn (ENewArray def[] init (Args (ENumeric 1) (ENumeric 2) (EString 'bird')))))", "return new def[] {1, 2, 'bird'}"); } @@ -372,7 +372,7 @@ public void testPField() { assertToString("(SSource (SReturn (PField nullSafe (EVariable params) a)))", "return params?.a"); assertToString( "(SSource\n" - + " (SDeclBlock (SDeclaration int[] a (ENewArray int dims (Args (ENumeric 10)))))\n" + + " (SDeclBlock (SDeclaration int[] a (ENewArray int[] dims (Args (ENumeric 10)))))\n" + " (SReturn (PField (EVariable a) length)))", "int[] a = new int[10];\n" + "return a.length"); @@ -403,7 +403,7 @@ public void testPSubBrace() { public void testPSubCallInvoke() { Location l = new Location(getTestName(), 0); - Struct c = definition.ClassToType(Integer.class).struct; + Struct c = definition.getPainlessStructFromJavaClass(Integer.class); Method m = c.methods.get(new MethodKey("toString", 0)); PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList()); node.prefix = new EVariable(l, "a"); @@ -458,7 +458,7 @@ public void testPSubDefField() { public void testPSubField() { Location l = new Location(getTestName(), 0); - Struct s = definition.getType(Boolean.class.getSimpleName()).struct; + Struct s = definition.getPainlessStructFromJavaClass(Boolean.class); Field f = s.staticMembers.get("TRUE"); PSubField node = new PSubField(l, f); node.prefix = new EStatic(l, "Boolean"); @@ -468,7 +468,7 @@ public void testPSubField() { public void testPSubListShortcut() { Location l = new Location(getTestName(), 0); - Struct s = definition.getType(List.class.getSimpleName()).struct; + Struct s = definition.getPainlessStructFromJavaClass(List.class); PSubListShortcut node = new PSubListShortcut(l, s, new EConstant(l, 1)); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EConstant Integer 1))", node.toString()); @@ -476,7 +476,7 @@ public void testPSubListShortcut() { new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 0); - s = definition.getType(List.class.getSimpleName()).struct; + s = definition.getPainlessStructFromJavaClass(List.class); node = new 
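// These PSub* nodes are the lowered forms PBrace (earlier in this diff) selects
// from the receiver's static type. Condensed dispatch as the PBrace hunk shows it
// (the array and def branches precede the Map/List shortcuts):
if (prefix.actual.isArray()) {
    // direct array load/store
} else if (prefix.actual == def.class) {
    // PSubDefArray: resolved at runtime
} else if (Map.class.isAssignableFrom(prefix.actual)) {
    sub = new PSubMapShortcut(location, locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual), index);
} else if (List.class.isAssignableFrom(prefix.actual)) {
    sub = new PSubListShortcut(location, locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual), index);
}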
PSubListShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); @@ -484,7 +484,7 @@ public void testPSubListShortcut() { public void testPSubMapShortcut() { Location l = new Location(getTestName(), 0); - Struct s = definition.getType(Map.class.getSimpleName()).struct; + Struct s = definition.getPainlessStructFromJavaClass(Map.class); PSubMapShortcut node = new PSubMapShortcut(l, s, new EConstant(l, "cat")); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EConstant String 'cat'))", node.toString()); @@ -492,7 +492,7 @@ public void testPSubMapShortcut() { new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - s = definition.getType(Map.class.getSimpleName()).struct; + s = definition.getPainlessStructFromJavaClass(Map.class); node = new PSubMapShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); @@ -500,7 +500,7 @@ public void testPSubMapShortcut() { public void testPSubShortcut() { Location l = new Location(getTestName(), 0); - Struct s = definition.getType(FeatureTest.class.getName()).struct; + Struct s = definition.getPainlessStructFromJavaClass(FeatureTest.class); Method getter = s.methods.get(new MethodKey("getX", 0)); Method setter = s.methods.get(new MethodKey("setX", 1)); PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter); diff --git a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java index fb20b73b61c00..7b72871f4f78d 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/common/blobstore/url/URLBlobContainer.java @@ -108,7 +108,7 @@ public InputStream readBlob(String name) throws IOException { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { throw new UnsupportedOperationException("URL repository doesn't support this operation"); } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java index 67902174630ea..a65500d9e2289 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -19,12 +19,9 @@ package org.elasticsearch.discovery.ec2; -import java.util.Random; -import java.util.concurrent.atomic.AtomicReference; - import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; @@ -39,6 
+36,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.LazyInitializable; +import java.util.Random; +import java.util.concurrent.atomic.AtomicReference; + class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service { public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/"; @@ -99,7 +99,7 @@ static ClientConfiguration buildConfiguration(Logger logger, Ec2ClientSettings c // pkg private for tests static AWSCredentialsProvider buildCredentials(Logger logger, Ec2ClientSettings clientSettings) { - final BasicAWSCredentials credentials = clientSettings.credentials; + final AWSCredentials credentials = clientSettings.credentials; if (credentials == null) { logger.debug("Using either environment variables, system properties or instance profile credentials"); return new DefaultAWSCredentialsProviderChain(); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java index b42b0d546001a..d76c9e820b8b1 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java @@ -21,14 +21,20 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; +import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.BasicAWSCredentials; - +import com.amazonaws.auth.BasicSessionCredentials; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; + import java.util.Locale; /** @@ -42,6 +48,9 @@ final class Ec2ClientSettings { /** The secret key (ie password) for connecting to ec2. */ static final Setting SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null); + /** The session token for connecting to ec2. */ + static final Setting SESSION_TOKEN_SETTING = SecureSetting.secureString("discovery.ec2.session_token", null); + /** The host name of a proxy to connect to ec2 through. */ static final Setting PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope); @@ -66,8 +75,12 @@ final class Ec2ClientSettings { static final Setting READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout", TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope); + private static final Logger logger = Loggers.getLogger(Ec2ClientSettings.class); + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(logger); + /** Credentials to authenticate with ec2. */ - final BasicAWSCredentials credentials; + final AWSCredentials credentials; /** * The ec2 endpoint the client should talk to, or empty string to use the @@ -96,7 +109,7 @@ final class Ec2ClientSettings { /** The read timeout for the ec2 client. 
*/ final int readTimeoutMillis; - protected Ec2ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, + protected Ec2ClientSettings(AWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, String proxyUsername, String proxyPassword, int readTimeoutMillis) { this.credentials = credentials; this.endpoint = endpoint; @@ -108,26 +121,45 @@ protected Ec2ClientSettings(BasicAWSCredentials credentials, String endpoint, Pr this.readTimeoutMillis = readTimeoutMillis; } - static BasicAWSCredentials loadCredentials(Settings settings) { - try (SecureString accessKey = ACCESS_KEY_SETTING.get(settings); - SecureString secretKey = SECRET_KEY_SETTING.get(settings);) { - if (accessKey.length() != 0) { - if (secretKey.length() != 0) { - return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + static AWSCredentials loadCredentials(Settings settings) { + try (SecureString key = ACCESS_KEY_SETTING.get(settings); + SecureString secret = SECRET_KEY_SETTING.get(settings); + SecureString sessionToken = SESSION_TOKEN_SETTING.get(settings)) { + if (key.length() == 0 && secret.length() == 0) { + if (sessionToken.length() > 0) { + throw new SettingsException("Setting [{}] is set but [{}] and [{}] are not", + SESSION_TOKEN_SETTING.getKey(), ACCESS_KEY_SETTING.getKey(), SECRET_KEY_SETTING.getKey()); + } + + logger.debug("Using either environment variables, system properties or instance profile credentials"); + return null; + } else { + if (key.length() == 0) { + DEPRECATION_LOGGER.deprecated("Setting [{}] is set but [{}] is not, which will be unsupported in future", + SECRET_KEY_SETTING.getKey(), ACCESS_KEY_SETTING.getKey()); + } + if (secret.length() == 0) { + DEPRECATION_LOGGER.deprecated("Setting [{}] is set but [{}] is not, which will be unsupported in future", + ACCESS_KEY_SETTING.getKey(), SECRET_KEY_SETTING.getKey()); + } + + final AWSCredentials credentials; + if (sessionToken.length() == 0) { + logger.debug("Using basic key/secret credentials"); + credentials = new BasicAWSCredentials(key.toString(), secret.toString()); } else { - throw new IllegalArgumentException("Missing secret key for ec2 client."); + logger.debug("Using basic session credentials"); + credentials = new BasicSessionCredentials(key.toString(), secret.toString(), sessionToken.toString()); } - } else if (secretKey.length() != 0) { - throw new IllegalArgumentException("Missing access key for ec2 client."); + return credentials; } - return null; } } // pkg private for tests /** Parse settings for a single client. 
*/ static Ec2ClientSettings getClientSettings(Settings settings) { - final BasicAWSCredentials credentials = loadCredentials(settings); + final AWSCredentials credentials = loadCredentials(settings); try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings); SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) { return new Ec2ClientSettings( diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java index 79f653d5bde55..d476d00eef860 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java @@ -107,6 +107,7 @@ public List> getSettings() { // Register EC2 discovery settings: discovery.ec2 Ec2ClientSettings.ACCESS_KEY_SETTING, Ec2ClientSettings.SECRET_KEY_SETTING, + Ec2ClientSettings.SESSION_TOKEN_SETTING, Ec2ClientSettings.ENDPOINT_SETTING, Ec2ClientSettings.PROTOCOL_SETTING, Ec2ClientSettings.PROXY_HOST_SETTING, diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java index a13fe47a632ae..148e58d7b3c06 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java @@ -23,10 +23,11 @@ import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.BasicSessionCredentials; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.ec2.AwsEc2ServiceImpl; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.instanceOf; @@ -44,15 +45,53 @@ public void testAWSCredentialsWithElasticsearchAwsSettings() { final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("discovery.ec2.access_key", "aws_key"); secureSettings.setString("discovery.ec2.secret_key", "aws_secret"); - final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - launchAWSCredentialsWithElasticsearchSettingsTest(settings, "aws_key", "aws_secret"); + final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build())).getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("aws_key")); + assertThat(credentials.getAWSSecretKey(), is("aws_secret")); } - protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings settings, String expectedKey, String expectedSecret) { - final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, Ec2ClientSettings.getClientSettings(settings)) - .getCredentials(); - assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); - assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); + public void testAWSSessionCredentialsWithElasticsearchAwsSettings() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + 
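// For context, loadCredentials above reduces to this decision table:
//   no key, no secret, no token -> null (use DefaultAWSCredentialsProviderChain)
//   token without key/secret    -> SettingsException
//   key + secret, no token      -> BasicAWSCredentials
//   key + secret + token        -> BasicSessionCredentials
//   only one of key/secret      -> deprecation warning, credentials still built
// A minimal sketch of the constructor choice (key, secret and sessionToken are the
// SecureString values read from the settings):
final AWSCredentials credentials = sessionToken.length() == 0
    ? new BasicAWSCredentials(key.toString(), secret.toString())
    : new BasicSessionCredentials(key.toString(), secret.toString(), sessionToken.toString());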
secureSettings.setString("discovery.ec2.access_key", "aws_key"); + secureSettings.setString("discovery.ec2.secret_key", "aws_secret"); + secureSettings.setString("discovery.ec2.session_token", "aws_session_token"); + final BasicSessionCredentials credentials = (BasicSessionCredentials) AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build())).getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("aws_key")); + assertThat(credentials.getAWSSecretKey(), is("aws_secret")); + assertThat(credentials.getSessionToken(), is("aws_session_token")); + } + + public void testDeprecationOfLoneAccessKey() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("discovery.ec2.access_key", "aws_key"); + final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build())).getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("aws_key")); + assertThat(credentials.getAWSSecretKey(), is("")); + assertSettingDeprecationsAndWarnings(new String[]{}, + "Setting [discovery.ec2.access_key] is set but [discovery.ec2.secret_key] is not, which will be unsupported in future"); + } + + public void testDeprecationOfLoneSecretKey() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("discovery.ec2.secret_key", "aws_secret"); + final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build())).getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("")); + assertThat(credentials.getAWSSecretKey(), is("aws_secret")); + assertSettingDeprecationsAndWarnings(new String[]{}, + "Setting [discovery.ec2.secret_key] is set but [discovery.ec2.access_key] is not, which will be unsupported in future"); + } + + public void testRejectionOfLoneSessionToken() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("discovery.ec2.session_token", "aws_session_token"); + SettingsException e = expectThrows(SettingsException.class, () -> AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build()))); + assertThat(e.getMessage(), is( + "Setting [discovery.ec2.session_token] is set but [discovery.ec2.access_key] and [discovery.ec2.secret_key] are not")); } public void testAWSDefaultConfiguration() { diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java index 6001ab56d5042..720ffaddd74a5 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -19,22 +19,24 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.BasicSessionCredentials; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; +import org.elasticsearch.test.ESTestCase; + import java.io.IOException; import 
java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -import org.elasticsearch.discovery.ec2.AwsEc2Service; -import org.elasticsearch.common.settings.MockSecureSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin; -import org.elasticsearch.node.Node; -import org.elasticsearch.test.ESTestCase; - public class Ec2DiscoveryPluginTests extends ESTestCase { private Settings getNodeAttributes(Settings settings, String url) { @@ -106,6 +108,10 @@ public void testClientSettingsReInit() throws IOException { final MockSecureSettings mockSecure1 = new MockSecureSettings(); mockSecure1.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_1"); mockSecure1.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_1"); + final boolean mockSecure1HasSessionToken = randomBoolean(); + if (mockSecure1HasSessionToken) { + mockSecure1.setString(Ec2ClientSettings.SESSION_TOKEN_SETTING.getKey(), "ec2_session_token_1"); + } mockSecure1.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_1"); mockSecure1.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_1"); final Settings settings1 = Settings.builder() @@ -117,6 +123,10 @@ public void testClientSettingsReInit() throws IOException { final MockSecureSettings mockSecure2 = new MockSecureSettings(); mockSecure2.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_2"); mockSecure2.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_2"); + final boolean mockSecure2HasSessionToken = randomBoolean(); + if (mockSecure2HasSessionToken) { + mockSecure2.setString(Ec2ClientSettings.SESSION_TOKEN_SETTING.getKey(), "ec2_session_token_2"); + } mockSecure2.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_2"); mockSecure2.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_2"); final Settings settings2 = Settings.builder() @@ -127,27 +137,50 @@ public void testClientSettingsReInit() throws IOException { .build(); try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings1)) { try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { - assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + { + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_1")); + assertThat(credentials.getAWSSecretKey(), is("ec2_secret_1")); + if (mockSecure1HasSessionToken) { + assertThat(credentials, instanceOf(BasicSessionCredentials.class)); 
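// Context for the assertions around this point: the reload contract is that a
// client reference acquired before reload(...) keeps the settings it was built
// with, and only references acquired afterwards see the new keystore values.
// Condensed sketch of the pattern the test drives:
try (AmazonEc2Reference reference = plugin.ec2Service.client()) {
    plugin.reload(settings2); // 'reference' still carries the settings1 credentials
}
// once released, the next client() call builds against settings2
try (AmazonEc2Reference fresh = plugin.ec2Service.client()) {
    // credentials, proxy and endpoint now come from settings2
}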
+ assertThat(((BasicSessionCredentials)credentials).getSessionToken(), is("ec2_session_token_1")); + } else { + assertThat(credentials, instanceOf(BasicAWSCredentials.class)); + } + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + } // reload secure settings2 plugin.reload(settings2); // client is not released, it is still using the old settings - assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + { + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + if (mockSecure1HasSessionToken) { + assertThat(credentials, instanceOf(BasicSessionCredentials.class)); + assertThat(((BasicSessionCredentials)credentials).getSessionToken(), is("ec2_session_token_1")); + } else { + assertThat(credentials, instanceOf(BasicAWSCredentials.class)); + } + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + } } try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { - assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_2")); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_2")); + assertThat(credentials.getAWSSecretKey(), is("ec2_secret_2")); + if (mockSecure2HasSessionToken) { + assertThat(credentials, instanceOf(BasicSessionCredentials.class)); + assertThat(((BasicSessionCredentials)credentials).getSessionToken(), is("ec2_session_token_2")); + } else { + assertThat(credentials, instanceOf(BasicAWSCredentials.class)); + } assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2")); assertThat(((AmazonEC2Mock) 
clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2")); assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2")); diff --git a/plugins/repository-azure/qa/microsoft-azure-storage/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java b/plugins/repository-azure/qa/microsoft-azure-storage/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java index f906b9fa9a913..0bd9503f43dac 100644 --- a/plugins/repository-azure/qa/microsoft-azure-storage/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java +++ b/plugins/repository-azure/qa/microsoft-azure-storage/src/test/java/org/elasticsearch/repositories/azure/AzureStorageFixture.java @@ -122,15 +122,20 @@ private static PathTrie defaultHandlers(final Map { final String destContainerName = request.getParam("container"); final String destBlobName = objectName(request.getParameters()); + final String ifNoneMatch = request.getHeader("If-None-Match"); final Container destContainer = containers.get(destContainerName); if (destContainer == null) { return newContainerNotFoundError(request.getId()); } - byte[] existingBytes = destContainer.objects.putIfAbsent(destBlobName, request.getBody()); - if (existingBytes != null) { - return newBlobAlreadyExistsError(request.getId()); + if ("*".equals(ifNoneMatch)) { + byte[] existingBytes = destContainer.objects.putIfAbsent(destBlobName, request.getBody()); + if (existingBytes != null) { + return newBlobAlreadyExistsError(request.getId()); + } + } else { + destContainer.objects.put(destBlobName, request.getBody()); } return new Response(RestStatus.CREATED.getStatus(), TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE); }) diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java index 686926c3a991e..08041d54cd14a 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobContainer.java @@ -86,11 +86,11 @@ public InputStream readBlob(String blobName) throws IOException { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { logger.trace("writeBlob({}, stream, {})", buildKey(blobName), blobSize); try { - blobStore.writeBlob(buildKey(blobName), inputStream, blobSize); + blobStore.writeBlob(buildKey(blobName), inputStream, blobSize, failIfAlreadyExists); } catch (URISyntaxException|StorageException e) { throw new IOException("Can not write blob " + blobName, e); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index d4497c5ee85fd..d519af370eb9e 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -119,8 +119,8 @@ public Map listBlobsByPrefix(String keyPath, String prefix return service.listBlobsByPrefix(clientName, container, keyPath, prefix); } - public void writeBlob(String 
blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, - FileAlreadyExistsException { - service.writeBlob(this.clientName, container, blobName, inputStream, blobSize); + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws URISyntaxException, StorageException, FileAlreadyExistsException { + service.writeBlob(this.clientName, container, blobName, inputStream, blobSize, failIfAlreadyExists); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 30266ee6cfc40..783006910faff 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -237,17 +237,20 @@ public Map listBlobsByPrefix(String account, String contai return blobsBuilder.immutableMap(); } - public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) + public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize, + boolean failIfAlreadyExists) throws URISyntaxException, StorageException, FileAlreadyExistsException { logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {})", blobName, blobSize)); final Tuple> client = client(account); final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); final CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName); try { + final AccessCondition accessCondition = + failIfAlreadyExists ? 
AccessCondition.generateIfNotExistsCondition() : AccessCondition.generateEmptyCondition(); SocketAccess.doPrivilegedVoidException(() -> - blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(), null, client.v2().get())); + blob.upload(inputStream, blobSize, accessCondition, null, client.v2().get())); } catch (final StorageException se) { - if (se.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT && + if (failIfAlreadyExists && se.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT && StorageErrorCodeStrings.BLOB_ALREADY_EXISTS.equals(se.getErrorCode())) { throw new FileAlreadyExistsException(blobName, null, se.getMessage()); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java index 365b559eca373..bfbb29fe2ee82 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java @@ -108,9 +108,10 @@ public Map listBlobsByPrefix(String account, String contai } @Override - public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) + public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize, + boolean failIfAlreadyExists) throws URISyntaxException, StorageException, FileAlreadyExistsException { - if (blobs.containsKey(blobName)) { + if (failIfAlreadyExists && blobs.containsKey(blobName)) { throw new FileAlreadyExistsException(blobName); } try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java index b1a185c9c08c9..b37b89b243ba7 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageFixture.java @@ -158,10 +158,6 @@ private static PathTrie defaultHandlers(final Map { final String ifGenerationMatch = request.getParam("ifGenerationMatch"); - if ("0".equals(ifGenerationMatch) == false) { - return newError(RestStatus.PRECONDITION_FAILED, "object already exist"); - } - final String uploadType = request.getParam("uploadType"); if ("resumable".equals(uploadType)) { final String objectName = request.getParam("name"); @@ -172,12 +168,19 @@ private static PathTrie defaultHandlers(final Map LARGE_BLOB_THRESHOLD_BYTE_SIZE) { - writeBlobResumable(blobInfo, inputStream); + writeBlobResumable(blobInfo, inputStream, failIfAlreadyExists); } else { - writeBlobMultipart(blobInfo, inputStream, blobSize); + writeBlobMultipart(blobInfo, inputStream, blobSize, failIfAlreadyExists); } } @@ -210,14 +210,17 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I * Uploads a blob using the "resumable upload" method (multiple requests, which * can be independently retried in case of failure, see * https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload - * * @param blobInfo the info for the blob to be uploaded * @param 
inputStream the stream containing the blob data + * @param failIfAlreadyExists whether to throw a FileAlreadyExistsException if the given blob already exists */ - private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { + private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream, boolean failIfAlreadyExists) throws IOException { try { + final Storage.BlobWriteOption[] writeOptions = failIfAlreadyExists ? + new Storage.BlobWriteOption[] { Storage.BlobWriteOption.doesNotExist() } : + new Storage.BlobWriteOption[0]; final WriteChannel writeChannel = SocketAccess - .doPrivilegedIOException(() -> client().writer(blobInfo, Storage.BlobWriteOption.doesNotExist())); + .doPrivilegedIOException(() -> client().writer(blobInfo, writeOptions)); Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { @Override public boolean isOpen() { @@ -236,7 +239,7 @@ public int write(ByteBuffer src) throws IOException { } })); } catch (final StorageException se) { - if (se.getCode() == HTTP_PRECON_FAILED) { + if (failIfAlreadyExists && se.getCode() == HTTP_PRECON_FAILED) { throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); } throw se; @@ -248,20 +251,24 @@ public int write(ByteBuffer src) throws IOException { * 'multipart/related' request containing both data and metadata. The request is * gzipped), see: * https://cloud.google.com/storage/docs/json_api/v1/how-tos/multipart-upload - * - * @param blobInfo the info for the blob to be uploaded + * @param blobInfo the info for the blob to be uploaded * @param inputStream the stream containing the blob data * @param blobSize the size + * @param failIfAlreadyExists whether to throw a FileAlreadyExistsException if the given blob already exists */ - private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long blobSize) throws IOException { + private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { assert blobSize <= LARGE_BLOB_THRESHOLD_BYTE_SIZE : "large blob uploads should use the resumable upload method"; final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); Streams.copy(inputStream, baos); try { + final Storage.BlobTargetOption[] targetOptions = failIfAlreadyExists ?
+ new Storage.BlobTargetOption[] { Storage.BlobTargetOption.doesNotExist() } : + new Storage.BlobTargetOption[0]; SocketAccess.doPrivilegedVoidIOException( - () -> client().create(blobInfo, baos.toByteArray(), Storage.BlobTargetOption.doesNotExist())); + () -> client().create(blobInfo, baos.toByteArray(), targetOptions)); } catch (final StorageException se) { - if (se.getCode() == HTTP_PRECON_FAILED) { + if (failIfAlreadyExists && se.getCode() == HTTP_PRECON_FAILED) { throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); } throw se; diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 23557ae6cf84a..580d033354e58 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -91,11 +91,12 @@ public InputStream readBlob(String blobName) throws IOException { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { store.execute((Operation) fileContext -> { Path blob = new Path(path, blobName); // CREATE alone fails if the blob already exists; when overwriting is allowed we also pass OVERWRITE. - EnumSet<CreateFlag> flags = EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK); + EnumSet<CreateFlag> flags = failIfAlreadyExists ? EnumSet.of(CreateFlag.CREATE, CreateFlag.SYNC_BLOCK) : + EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE, CreateFlag.SYNC_BLOCK); CreateOpts[] opts = {CreateOpts.bufferSize(bufferSize)}; try (FSDataOutputStream stream = fileContext.create(blob, flags, opts)) { int bytesRead; diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java index a5d68331db78e..ba00862e93848 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java @@ -135,7 +135,7 @@ public void testReadOnly() throws Exception { assertTrue(util.exists(hdfsPath)); byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); - writeBlob(container, "foo", new BytesArray(data)); + writeBlob(container, "foo", new BytesArray(data), randomBoolean()); assertArrayEquals(readBlobFully(container, "foo", data.length), data); assertTrue(container.blobExists("foo")); } diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 8448b2ab9e1ac..dc2140a6086a4 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -1,3 +1,12 @@ +import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.LoggedExec +import org.elasticsearch.gradle.MavenFilteringHack +import org.elasticsearch.gradle.test.AntFixture +import org.elasticsearch.gradle.test.ClusterConfiguration +import org.elasticsearch.gradle.test.RestIntegTestTask + +import java.lang.reflect.Field + /* * Licensed to Elasticsearch under one or more contributor * license agreements.
See the NOTICE file distributed with @@ -64,14 +73,245 @@ test { exclude '**/*CredentialsTests.class' } -check { - // also execute the QA tests when testing the plugin - dependsOn 'qa:amazon-s3:check' +boolean useFixture = false + +// We test against two repositories, one which uses the usual two-part "permanent" credentials and +// the other which uses three-part "temporary" or "session" credentials. + +String s3PermanentAccessKey = System.getenv("amazon_s3_access_key") +String s3PermanentSecretKey = System.getenv("amazon_s3_secret_key") +String s3PermanentBucket = System.getenv("amazon_s3_bucket") +String s3PermanentBasePath = System.getenv("amazon_s3_base_path") + +String s3TemporaryAccessKey = System.getenv("amazon_s3_access_key_temporary") +String s3TemporarySecretKey = System.getenv("amazon_s3_secret_key_temporary") +String s3TemporarySessionToken = System.getenv("amazon_s3_session_token_temporary") +String s3TemporaryBucket = System.getenv("amazon_s3_bucket_temporary") +String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") + +// If all these variables are missing then we are testing against the internal fixture instead, which has the following +// credentials hard-coded in. + +if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath + && !s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) { + + s3PermanentAccessKey = 's3_integration_test_permanent_access_key' + s3PermanentSecretKey = 's3_integration_test_permanent_secret_key' + s3PermanentBucket = 'permanent-bucket-test' + s3PermanentBasePath = 'integration_test' + + s3TemporaryAccessKey = 's3_integration_test_temporary_access_key' + s3TemporarySecretKey = 's3_integration_test_temporary_secret_key' + s3TemporaryBucket = 'temporary-bucket-test' + s3TemporaryBasePath = 'integration_test' + s3TemporarySessionToken = 's3_integration_test_temporary_session_token' + + useFixture = true +} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath + || !s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) { + throw new IllegalArgumentException("not all options specified to run against external S3 service") +} + +final String minioVersion = 'RELEASE.2018-06-22T23-48-46Z' +final String minioBinDir = "${buildDir}/minio/bin" +final String minioDataDir = "${buildDir}/minio/data" +final String minioAddress = "127.0.0.1:60920" + +final String minioDistribution +final String minioCheckSum +if (Os.isFamily(Os.FAMILY_MAC)) { + minioDistribution = 'darwin-amd64' + minioCheckSum = '96b0bcb2f590e8e65fb83d5c3e221f9bd1106b49fa6f22c6b726b80b845d7c60' +} else if (Os.isFamily(Os.FAMILY_UNIX)) { + minioDistribution = 'linux-amd64' + minioCheckSum = '713dac7c105285eab3b92649be92b5e793b29d3525c7929fa7aaed99374fad99' +} else { + minioDistribution = null + minioCheckSum = null +} + +buildscript { + repositories { + maven { + url 'https://plugins.gradle.org/m2/' + } + } + dependencies { + classpath 'de.undercouch:gradle-download-task:3.4.3' + } +} + +if (useFixture && minioDistribution) { + apply plugin: 'de.undercouch.download' + + final String minioFileName = "minio.${minioVersion}" + final String minioDownloadURL = "https://dl.minio.io/server/minio/release/${minioDistribution}/archive/${minioFileName}" + final String minioFilePath = "${gradle.gradleUserHomeDir}/downloads/minio/${minioDistribution}/${minioFileName}" + + 
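// For reference: to run these tests against the real S3 service rather than the fixtures, all of
// the environment variables read above must be exported together (the values below are
// placeholders):
//
//   export amazon_s3_access_key=... amazon_s3_secret_key=... amazon_s3_bucket=... amazon_s3_base_path=...
//   export amazon_s3_access_key_temporary=... amazon_s3_secret_key_temporary=... amazon_s3_session_token_temporary=...
//   export amazon_s3_bucket_temporary=... amazon_s3_base_path_temporary=...
//
// Supplying only a subset trips the IllegalArgumentException above.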
task downloadMinio(type: Download) { + src minioDownloadURL + dest minioFilePath + onlyIfModified true + } + + task verifyMinioChecksum(type: Verify, dependsOn: downloadMinio) { + src minioFilePath + algorithm 'SHA-256' + checksum minioCheckSum + } + + task installMinio(type: Sync, dependsOn: verifyMinioChecksum) { + from minioFilePath + into minioBinDir + fileMode 0755 + } + + task startMinio { + dependsOn installMinio + + ext.minioPid = 0L + + doLast { + new File("${minioDataDir}/${s3PermanentBucket}").mkdirs() + // we skip these tests on Windows so we do not need to worry about compatibility here + final ProcessBuilder minio = new ProcessBuilder( + "${minioBinDir}/${minioFileName}", + "server", + "--address", + minioAddress, + minioDataDir) + minio.environment().put('MINIO_ACCESS_KEY', s3PermanentAccessKey) + minio.environment().put('MINIO_SECRET_KEY', s3PermanentSecretKey) + final Process process = minio.start() + // Process.pid() only exists from Java 9, so on Java 8 we read the private "pid" field reflectively + if (JavaVersion.current() <= JavaVersion.VERSION_1_8) { + try { + Class cProcessImpl = process.getClass() + Field fPid = cProcessImpl.getDeclaredField("pid") + if (!fPid.isAccessible()) { + fPid.setAccessible(true) + } + minioPid = fPid.getInt(process) + } catch (Exception e) { + logger.error("failed to read pid from minio process", e) + process.destroyForcibly() + throw e + } + } else { + minioPid = process.pid() + } + + new BufferedReader(new InputStreamReader(process.getInputStream())).withReader { br -> + String line + int httpPort = 0 + while ((line = br.readLine()) != null) { + logger.info(line) + if (line.matches('.*Endpoint.*:\\d+$')) { + assert httpPort == 0 + final int index = line.lastIndexOf(":") + assert index >= 0 + httpPort = Integer.parseInt(line.substring(index + 1)) + + final File script = new File(project.buildDir, "minio/minio.killer.sh") + script.setText( + ["function shutdown {", + " kill ${minioPid}", + "}", + "trap shutdown EXIT", + // will wait indefinitely for input, but we never pass input, and the pipe is only closed when the build dies + "read line\n"].join('\n'), 'UTF-8') + final ProcessBuilder killer = new ProcessBuilder("bash", script.absolutePath) + killer.start() + break + } + } + + assert httpPort > 0 + } + } + } + + task stopMinio(type: LoggedExec) { + onlyIf { startMinio.minioPid > 0 } + + doFirst { + logger.info("Shutting down minio with pid ${startMinio.minioPid}") + } + + final Object pid = "${ -> startMinio.minioPid }" + + // we skip these tests on Windows so we do not need to worry about compatibility here + executable = 'kill' + args('-9', pid) + } + + RestIntegTestTask integTestMinio = project.tasks.create('integTestMinio', RestIntegTestTask.class) { + description = "Runs REST tests using the Minio repository."
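// (The test cluster for this task, integTestMinioCluster, is configured in the afterEvaluate
// block below; only the permanent-credentials client is pointed at Minio there, since Minio
// supports a single access key.)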
+ } + + // The following closure must execute before the afterEvaluate block registered in the constructor of the integTestMinio task above: + project.afterEvaluate { ClusterConfiguration cluster = project.extensions.getByName('integTestMinioCluster') as ClusterConfiguration + cluster.dependsOn(project.bundlePlugin) + cluster.keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey + cluster.keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey + + cluster.setting 's3.client.integration_test_permanent.endpoint', "http://${minioAddress}" + + Task restIntegTestTask = project.tasks.getByName('integTestMinio') + restIntegTestTask.clusterConfig.plugin(project.path) + + // Default jvm arguments for all test clusters + String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') + + " " + "-Xmx" + System.getProperty('tests.heap.size', '512m') + + " " + System.getProperty('tests.jvm.argline', '') + + restIntegTestTask.clusterConfig.jvmArgs = jvmArgs + } + + integTestMinioRunner.dependsOn(startMinio) + integTestMinioRunner.finalizedBy(stopMinio) + // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 + integTestMinioRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*' + + project.check.dependsOn(integTestMinio) +} + +/** A task to start the AmazonS3Fixture which emulates an S3 service */ +task s3Fixture(type: AntFixture) { + dependsOn testClasses + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" + executable = new File(project.runtimeJavaHome, 'bin/java') + args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3PermanentBucket, s3TemporaryBucket +} + +Map<String, Object> expansions = [ + 'permanent_bucket': s3PermanentBucket, + 'permanent_base_path': s3PermanentBasePath, + 'temporary_bucket': s3TemporaryBucket, + 'temporary_base_path': s3TemporaryBasePath +] + +processTestResources { + inputs.properties(expansions) + MavenFilteringHack.filter(it, expansions) } integTestCluster { - keystoreSetting 's3.client.integration_test.access_key', "s3_integration_test_access_key" - keystoreSetting 's3.client.integration_test.secret_key', "s3_integration_test_secret_key" + keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey + keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey + + keystoreSetting 's3.client.integration_test_temporary.access_key', s3TemporaryAccessKey + keystoreSetting 's3.client.integration_test_temporary.secret_key', s3TemporarySecretKey + keystoreSetting 's3.client.integration_test_temporary.session_token', s3TemporarySessionToken + + if (useFixture) { + dependsOn s3Fixture + /* Use a closure on the string to delay evaluation until tests are executed */ + setting 's3.client.integration_test_permanent.endpoint', "http://${-> s3Fixture.addressAndPort}" + setting 's3.client.integration_test_temporary.endpoint', "http://${-> s3Fixture.addressAndPort}" + } else { + println "Using an external service to test the repository-s3 plugin" + } } thirdPartyAudit.excludes = [ diff --git a/plugins/repository-s3/qa/amazon-s3/build.gradle b/plugins/repository-s3/qa/amazon-s3/build.gradle deleted file mode 100644 index dbbffdebded47..0000000000000 --- a/plugins/repository-s3/qa/amazon-s3/build.gradle +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements.
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.elasticsearch.gradle.MavenFilteringHack -import org.elasticsearch.gradle.test.AntFixture - -apply plugin: 'elasticsearch.standalone-rest-test' -apply plugin: 'elasticsearch.rest-test' - -dependencies { - testCompile project(path: ':plugins:repository-s3', configuration: 'runtime') -} - -integTestCluster { - plugin ':plugins:repository-s3' -} - -boolean useFixture = false - -String s3AccessKey = System.getenv("amazon_s3_access_key") -String s3SecretKey = System.getenv("amazon_s3_secret_key") -String s3Bucket = System.getenv("amazon_s3_bucket") -String s3BasePath = System.getenv("amazon_s3_base_path") - -if (!s3AccessKey && !s3SecretKey && !s3Bucket && !s3BasePath) { - s3AccessKey = 's3_integration_test_access_key' - s3SecretKey = 's3_integration_test_secret_key' - s3Bucket = 'bucket_test' - s3BasePath = 'integration_test' - useFixture = true -} - -/** A task to start the AmazonS3Fixture which emulates a S3 service **/ -task s3Fixture(type: AntFixture) { - dependsOn testClasses - env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" - executable = new File(project.runtimeJavaHome, 'bin/java') - args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3Bucket -} - -Map expansions = [ - 'bucket': s3Bucket, - 'base_path': s3BasePath -] - -processTestResources { - inputs.properties(expansions) - MavenFilteringHack.filter(it, expansions) -} - -integTestCluster { - keystoreSetting 's3.client.integration_test.access_key', s3AccessKey - keystoreSetting 's3.client.integration_test.secret_key', s3SecretKey - - if (useFixture) { - dependsOn s3Fixture - /* Use a closure on the string to delay evaluation until tests are executed */ - setting 's3.client.integration_test.endpoint', "http://${-> s3Fixture.addressAndPort}" - } else { - println "Using an external service to test the repository-s3 plugin" - } -} \ No newline at end of file diff --git a/plugins/repository-s3/qa/build.gradle b/plugins/repository-s3/qa/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 86b01a3e79cdd..b7cc2b89605d3 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -90,8 +90,11 @@ public InputStream readBlob(String blobName) throws IOException { } } + /** + * This implementation ignores the failIfAlreadyExists flag as the S3 API has no way to enforce this due to its weak consistency model. 
+ */ @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { SocketAccess.doPrivilegedIOException(() -> { if (blobSize <= blobStore.bufferSizeInBytes()) { executeSingleUpload(blobStore, buildKey(blobName), inputStream, blobSize); diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java index ef6088fe154bf..795304541be35 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java @@ -26,8 +26,10 @@ import java.util.Set; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; +import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.BasicSessionCredentials; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; @@ -52,6 +54,10 @@ final class S3ClientSettings { static final Setting.AffixSetting<SecureString> SECRET_KEY_SETTING = Setting.affixKeySetting(PREFIX, "secret_key", key -> SecureSetting.secureString(key, null)); + /** The session token for connecting to s3, used with temporary (session) credentials. */ + static final Setting.AffixSetting<SecureString> SESSION_TOKEN_SETTING = Setting.affixKeySetting(PREFIX, "session_token", + key -> SecureSetting.secureString(key, null)); + /** An override for the s3 endpoint to connect to. */ static final Setting.AffixSetting<String> ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", key -> new Setting<>(key, "", s -> s.toLowerCase(Locale.ROOT), Property.NodeScope)); @@ -89,7 +95,7 @@ final class S3ClientSettings { key -> Setting.boolSetting(key, ClientConfiguration.DEFAULT_THROTTLE_RETRIES, Property.NodeScope)); /** Credentials to authenticate with s3. */ - final BasicAWSCredentials credentials; + final AWSCredentials credentials; /** The s3 endpoint the client should talk to, or empty string to use the default. */ final String endpoint; @@ -120,7 +126,7 @@ final class S3ClientSettings { /** Whether the s3 client should use an exponential backoff retry policy.
*/ final boolean throttleRetries; - protected S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, + protected S3ClientSettings(AWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, String proxyUsername, String proxyPassword, int readTimeoutMillis, int maxRetries, boolean throttleRetries) { this.credentials = credentials; @@ -190,26 +196,36 @@ static BasicAWSCredentials loadDeprecatedCredentials(Settings repositorySettings } } - static BasicAWSCredentials loadCredentials(Settings settings, String clientName) { + static AWSCredentials loadCredentials(Settings settings, String clientName) { try (SecureString accessKey = getConfigValue(settings, clientName, ACCESS_KEY_SETTING); - SecureString secretKey = getConfigValue(settings, clientName, SECRET_KEY_SETTING);) { + SecureString secretKey = getConfigValue(settings, clientName, SECRET_KEY_SETTING); + SecureString sessionToken = getConfigValue(settings, clientName, SESSION_TOKEN_SETTING)) { if (accessKey.length() != 0) { if (secretKey.length() != 0) { - return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + if (sessionToken.length() != 0) { + return new BasicSessionCredentials(accessKey.toString(), secretKey.toString(), sessionToken.toString()); + } else { + return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + } } else { throw new IllegalArgumentException("Missing secret key for s3 client [" + clientName + "]"); } - } else if (secretKey.length() != 0) { - throw new IllegalArgumentException("Missing access key for s3 client [" + clientName + "]"); + } else { + if (secretKey.length() != 0) { + throw new IllegalArgumentException("Missing access key for s3 client [" + clientName + "]"); + } + if (sessionToken.length() != 0) { + throw new IllegalArgumentException("Missing access key and secret key for s3 client [" + clientName + "]"); + } + return null; } - return null; } } // pkg private for tests /** Parse settings for a single client. 
*/ static S3ClientSettings getClientSettings(Settings settings, String clientName) { - final BasicAWSCredentials credentials = S3ClientSettings.loadCredentials(settings, clientName); + final AWSCredentials credentials = S3ClientSettings.loadCredentials(settings, clientName); try (SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING)) { return new S3ClientSettings( diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index 6a605319114fe..79a5187059f38 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -92,6 +92,7 @@ public List> getSettings() { // named s3 client configuration settings S3ClientSettings.ACCESS_KEY_SETTING, S3ClientSettings.SECRET_KEY_SETTING, + S3ClientSettings.SESSION_TOKEN_SETTING, S3ClientSettings.ENDPOINT_SETTING, S3ClientSettings.PROTOCOL_SETTING, S3ClientSettings.PROXY_HOST_SETTING, diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 58c83bd84da49..cb12555d9eaa2 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -22,7 +22,6 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.InstanceProfileCredentialsProvider; import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; @@ -133,7 +132,7 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { // pkg private for tests static AWSCredentialsProvider buildCredentials(Logger logger, S3ClientSettings clientSettings) { - final BasicAWSCredentials credentials = clientSettings.credentials; + final AWSCredentials credentials = clientSettings.credentials; if (credentials == null) { logger.debug("Using instance profile credentials"); return new PrivilegedInstanceProfileCredentialsProvider(); diff --git a/plugins/repository-s3/qa/amazon-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java similarity index 87% rename from plugins/repository-s3/qa/amazon-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java rename to plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index d1034aff48248..e21f2bf71496b 100644 --- a/plugins/repository-s3/qa/amazon-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -52,13 +52,19 @@ public class AmazonS3Fixture extends AbstractHttpFixture { /** Request handlers for the requests made by the S3 client **/ private final PathTrie handlers; + private final String permanentBucketName; + private final String temporaryBucketName; /** * Creates a {@link AmazonS3Fixture} */ - private 
AmazonS3Fixture(final String workingDir, final String bucket) { + private AmazonS3Fixture(final String workingDir, final String permanentBucketName, final String temporaryBucketName) { super(workingDir); - this.buckets.put(bucket, new Bucket(bucket)); + this.permanentBucketName = permanentBucketName; + this.temporaryBucketName = temporaryBucketName; + + this.buckets.put(permanentBucketName, new Bucket(permanentBucketName)); + this.buckets.put(temporaryBucketName, new Bucket(temporaryBucketName)); this.handlers = defaultHandlers(buckets); } @@ -67,21 +73,52 @@ protected Response handle(final Request request) throws IOException { final RequestHandler handler = handlers.retrieve(request.getMethod() + " " + request.getPath(), request.getParameters()); if (handler != null) { final String authorization = request.getHeader("Authorization"); - if (authorization == null - || (authorization.length() > 0 && authorization.contains("s3_integration_test_access_key") == false)) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Access Denied", ""); + if (authorization == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "No authorization", ""); + } + final String permittedBucket; + if (authorization.contains("s3_integration_test_permanent_access_key")) { + final String sessionToken = request.getHeader("x-amz-security-token"); + if (sessionToken != null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Unexpected session token", ""); + } + permittedBucket = permanentBucketName; + } else if (authorization.contains("s3_integration_test_temporary_access_key")) { + final String sessionToken = request.getHeader("x-amz-security-token"); + if (sessionToken == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "No session token", ""); + } + if (sessionToken.equals("s3_integration_test_temporary_session_token") == false) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad session token", ""); + } + permittedBucket = temporaryBucketName; + } else { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); + } + + if (handler != null) { + final String bucket = request.getParam("bucket"); + if (bucket != null && permittedBucket.equals(bucket) == false) { + // allow a null bucket to support the multi-object-delete API which + // passes the bucket name in the host header instead of the URL.
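// For illustration only (not part of the patch): a multi-object-delete request is sent
// virtual-hosted style, so the bucket appears in the Host header rather than in the path
// and request.getParam("bucket") returns null. Sketch, with a placeholder host name:
//
//   POST /?delete HTTP/1.1
//   Host: permanent-bucket-test.s3.amazonaws.com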
+ if (buckets.containsKey(bucket)) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad bucket", ""); + } else { + return newBucketNotFoundError(request.getId(), bucket); + } + } + return handler.handle(request); + } else { + return newInternalError(request.getId(), "No handler defined for request [" + request + "]"); } - return handler.handle(request); } return null; } public static void main(final String[] args) throws Exception { - if (args == null || args.length != 2) { - throw new IllegalArgumentException("AmazonS3Fixture "); + if (args == null || args.length != 3) { + throw new IllegalArgumentException( + "AmazonS3Fixture "); } - final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], args[1]); + final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], args[1], args[2]); fixture.listen(); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java new file mode 100644 index 0000000000000..e629f43f8a3d3 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.repositories.s3; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.Protocol; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.BasicSessionCredentials; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isEmptyString; +import static org.hamcrest.Matchers.nullValue; + +public class S3ClientSettingsTests extends ESTestCase { + public void testThereIsADefaultClientByDefault() { + final Map settings = S3ClientSettings.load(Settings.EMPTY); + assertThat(settings.keySet(), contains("default")); + + final S3ClientSettings defaultSettings = settings.get("default"); + assertThat(defaultSettings.credentials, nullValue()); + assertThat(defaultSettings.endpoint, isEmptyString()); + assertThat(defaultSettings.protocol, is(Protocol.HTTPS)); + assertThat(defaultSettings.proxyHost, isEmptyString()); + assertThat(defaultSettings.proxyPort, is(80)); + assertThat(defaultSettings.proxyUsername, isEmptyString()); + assertThat(defaultSettings.proxyPassword, isEmptyString()); + assertThat(defaultSettings.readTimeoutMillis, is(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT)); + assertThat(defaultSettings.maxRetries, is(ClientConfiguration.DEFAULT_RETRY_POLICY.getMaxErrorRetry())); + assertThat(defaultSettings.throttleRetries, is(ClientConfiguration.DEFAULT_THROTTLE_RETRIES)); + } + + public void testDefaultClientSettingsCanBeSet() { + final Map settings = S3ClientSettings.load(Settings.builder() + .put("s3.client.default.max_retries", 10).build()); + assertThat(settings.keySet(), contains("default")); + + final S3ClientSettings defaultSettings = settings.get("default"); + assertThat(defaultSettings.maxRetries, is(10)); + } + + public void testNondefaultClientCreatedBySettingItsSettings() { + final Map settings = S3ClientSettings.load(Settings.builder() + .put("s3.client.another_client.max_retries", 10).build()); + assertThat(settings.keySet(), contains("default", "another_client")); + + final S3ClientSettings defaultSettings = settings.get("default"); + assertThat(defaultSettings.maxRetries, is(ClientConfiguration.DEFAULT_RETRY_POLICY.getMaxErrorRetry())); + + final S3ClientSettings anotherClientSettings = settings.get("another_client"); + assertThat(anotherClientSettings.maxRetries, is(10)); + } + + public void testRejectionOfLoneAccessKey() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.access_key", "aws_key"); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> S3ClientSettings.load(Settings.builder().setSecureSettings(secureSettings).build())); + assertThat(e.getMessage(), is("Missing secret key for s3 client [default]")); + } + + public void testRejectionOfLoneSecretKey() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.secret_key", "aws_key"); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> S3ClientSettings.load(Settings.builder().setSecureSettings(secureSettings).build())); + assertThat(e.getMessage(), is("Missing access key for s3 client [default]")); + } + + public void testRejectionOfLoneSessionToken() { + final MockSecureSettings secureSettings = new 
MockSecureSettings(); + secureSettings.setString("s3.client.default.session_token", "aws_key"); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> S3ClientSettings.load(Settings.builder().setSecureSettings(secureSettings).build())); + assertThat(e.getMessage(), is("Missing access key and secret key for s3 client [default]")); + } + + public void testCredentialsTypeWithAccessKeyAndSecretKey() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.access_key", "access_key"); + secureSettings.setString("s3.client.default.secret_key", "secret_key"); + final Map settings = S3ClientSettings.load(Settings.builder().setSecureSettings(secureSettings).build()); + final S3ClientSettings defaultSettings = settings.get("default"); + BasicAWSCredentials credentials = (BasicAWSCredentials) defaultSettings.credentials; + assertThat(credentials.getAWSAccessKeyId(), is("access_key")); + assertThat(credentials.getAWSSecretKey(), is("secret_key")); + } + + public void testCredentialsTypeWithAccessKeyAndSecretKeyAndSessionToken() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.access_key", "access_key"); + secureSettings.setString("s3.client.default.secret_key", "secret_key"); + secureSettings.setString("s3.client.default.session_token", "session_token"); + final Map settings = S3ClientSettings.load(Settings.builder().setSecureSettings(secureSettings).build()); + final S3ClientSettings defaultSettings = settings.get("default"); + BasicSessionCredentials credentials = (BasicSessionCredentials) defaultSettings.credentials; + assertThat(credentials.getAWSAccessKeyId(), is("access_key")); + assertThat(credentials.getAWSSecretKey(), is("secret_key")); + assertThat(credentials.getSessionToken(), is("session_token")); + } +} diff --git a/plugins/repository-s3/qa/amazon-s3/src/test/resources/rest-api-spec/test/repository_s3/10_repository.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml similarity index 72% rename from plugins/repository-s3/qa/amazon-s3/src/test/resources/rest-api-spec/test/repository_s3/10_repository.yml rename to plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml index 56e2b2cb8fa16..39ce992b7a58e 100644 --- a/plugins/repository-s3/qa/amazon-s3/src/test/resources/rest-api-spec/test/repository_s3/10_repository.yml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml @@ -3,34 +3,35 @@ --- setup: - # Register repository + # Register repository with permanent credentials - do: snapshot.create_repository: - repository: repository + repository: repository_permanent body: type: s3 settings: - bucket: ${bucket} - client: integration_test - base_path: ${base_path} + bucket: ${permanent_bucket} + client: integration_test_permanent + base_path: ${permanent_base_path} canned_acl: private storage_class: standard --- -"Snapshot/Restore with repository-s3": +"Snapshot and Restore with repository-s3 using permanent credentials": # Get repository - do: snapshot.get_repository: - repository: repository + repository: repository_permanent - - match: { repository.settings.bucket : ${bucket} } - - match: { repository.settings.client : "integration_test" } - - match: { repository.settings.base_path : ${base_path} } - - match: { 
repository.settings.canned_acl : "private" } - - match: { repository.settings.storage_class : "standard" } - - is_false: repository.settings.access_key - - is_false: repository.settings.secret_key + - match: { repository_permanent.settings.bucket : ${permanent_bucket} } + - match: { repository_permanent.settings.client : "integration_test_permanent" } + - match: { repository_permanent.settings.base_path : ${permanent_base_path} } + - match: { repository_permanent.settings.canned_acl : "private" } + - match: { repository_permanent.settings.storage_class : "standard" } + - is_false: repository_permanent.settings.access_key + - is_false: repository_permanent.settings.secret_key + - is_false: repository_permanent.settings.session_token # Index documents - do: @@ -62,7 +63,7 @@ setup: # Create a first snapshot - do: snapshot.create: - repository: repository + repository: repository_permanent snapshot: snapshot-one wait_for_completion: true @@ -73,7 +74,7 @@ setup: - do: snapshot.status: - repository: repository + repository: repository_permanent snapshot: snapshot-one - is_true: snapshots @@ -115,7 +116,7 @@ setup: # Create a second snapshot - do: snapshot.create: - repository: repository + repository: repository_permanent snapshot: snapshot-two wait_for_completion: true @@ -125,7 +126,7 @@ setup: - do: snapshot.get: - repository: repository + repository: repository_permanent snapshot: snapshot-one,snapshot-two - is_true: snapshots @@ -140,7 +141,7 @@ setup: # Restore the second snapshot - do: snapshot.restore: - repository: repository + repository: repository_permanent snapshot: snapshot-two wait_for_completion: true @@ -158,7 +159,7 @@ setup: # Restore the first snapshot - do: snapshot.restore: - repository: repository + repository: repository_permanent snapshot: snapshot-one wait_for_completion: true @@ -171,12 +172,12 @@ setup: # Remove the snapshots - do: snapshot.delete: - repository: repository + repository: repository_permanent snapshot: snapshot-two - do: snapshot.delete: - repository: repository + repository: repository_permanent snapshot: snapshot-one --- @@ -185,12 +186,12 @@ setup: - do: catch: /repository_exception/ snapshot.create_repository: - repository: repository + repository: repository_permanent body: type: s3 settings: bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE - client: integration_test + client: integration_test_permanent --- "Register a repository with a non existing client": @@ -198,11 +199,11 @@ setup: - do: catch: /repository_exception/ snapshot.create_repository: - repository: repository + repository: repository_permanent body: type: s3 settings: - bucket: repository + bucket: repository_permanent client: unknown --- @@ -211,7 +212,7 @@ setup: - do: catch: /snapshot_missing_exception/ snapshot.get: - repository: repository + repository: repository_permanent snapshot: missing --- @@ -220,7 +221,7 @@ setup: - do: catch: /snapshot_missing_exception/ snapshot.delete: - repository: repository + repository: repository_permanent snapshot: missing --- @@ -229,7 +230,7 @@ setup: - do: catch: /snapshot_restore_exception/ snapshot.restore: - repository: repository + repository: repository_permanent snapshot: missing wait_for_completion: true @@ -239,4 +240,4 @@ teardown: # Remove our repository - do: snapshot.delete_repository: - repository: repository + repository: repository_permanent diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml 
b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml new file mode 100644 index 0000000000000..497d85db752db --- /dev/null +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml @@ -0,0 +1,243 @@ +# Integration tests for repository-s3 + +--- +setup: + + # Register repository with temporary credentials + - do: + snapshot.create_repository: + repository: repository_temporary + body: + type: s3 + settings: + bucket: ${temporary_bucket} + client: integration_test_temporary + base_path: ${temporary_base_path} + canned_acl: private + storage_class: standard + +--- +"Snapshot and Restore with repository-s3 using temporary credentials": + + # Get repository + - do: + snapshot.get_repository: + repository: repository_temporary + + - match: { repository_temporary.settings.bucket : ${temporary_bucket} } + - match: { repository_temporary.settings.client : "integration_test_temporary" } + - match: { repository_temporary.settings.base_path : ${temporary_base_path} } + - match: { repository_temporary.settings.canned_acl : "private" } + - match: { repository_temporary.settings.storage_class : "standard" } + - is_false: repository_temporary.settings.access_key + - is_false: repository_temporary.settings.secret_key + - is_false: repository_temporary.settings.session_token + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository_temporary + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository_temporary + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository_temporary + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository_temporary + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index + - do: + indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository_temporary + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: 
repository_temporary + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository_temporary + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository_temporary + snapshot: snapshot-one + +--- +"Register a repository with a non existing bucket": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_temporary + body: + type: s3 + settings: + bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test_temporary + +--- +"Register a repository with a non existing client": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_temporary + body: + type: s3 + settings: + bucket: repository_temporary + client: unknown + +--- +"Get a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.get: + repository: repository_temporary + snapshot: missing + +--- +"Delete a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.delete: + repository: repository_temporary + snapshot: missing + +--- +"Restore a non existing snapshot": + + - do: + catch: /snapshot_restore_exception/ + snapshot.restore: + repository: repository_temporary + snapshot: missing + wait_for_completion: true + +--- +teardown: + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository_temporary diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 8186f1b935370..ada2329857e10 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -558,9 +558,8 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestRestoreSnapshotAction(settings, restController)); registerHandler.accept(new RestDeleteSnapshotAction(settings, restController)); registerHandler.accept(new RestSnapshotsStatusAction(settings, restController)); - registerHandler.accept(new RestGetAllAliasesAction(settings, restController)); - registerHandler.accept(new RestGetIndicesAction(settings, restController, indexScopedSettings, settingsFilter)); + registerHandler.accept(new RestGetIndicesAction(settings, restController)); registerHandler.accept(new RestIndicesStatsAction(settings, restController)); registerHandler.accept(new RestIndicesSegmentsAction(settings, restController)); registerHandler.accept(new RestIndicesShardStoresAction(settings, restController)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsRequest.java new file mode 100644 index 0000000000000..7048b60fc2a6b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsRequest.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; + +/** + * This request is specific to the REST client. {@link org.elasticsearch.action.admin.cluster.state.ClusterStateRequest} + * is used on the transport layer. + */ +public class ClusterGetSettingsRequest extends MasterNodeReadRequest { + private boolean includeDefaults = false; + + @Override + public ActionRequestValidationException validate() { + return null; + } + + /** + * When include_defaults is set, return default settings which are normally suppressed. + */ + public ClusterGetSettingsRequest includeDefaults(boolean includeDefaults) { + this.includeDefaults = includeDefaults; + return this; + } + + public boolean includeDefaults() { + return includeDefaults; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java new file mode 100644 index 0000000000000..19b0517d96c95 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * This response is specific to the REST client. {@link org.elasticsearch.action.admin.cluster.state.ClusterStateResponse} + * is used on the transport layer. 
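Since both halves of this request/response pair are new, a short usage sketch may help. Everything below comes from the classes in this diff except the sketch class name and the sample JSON body, which are invented for illustration:

```java
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

public class ClusterGetSettingsSketch {
    public static void main(String[] args) throws Exception {
        // Request side: asking for defaults corresponds to ?include_defaults=true on the REST API.
        ClusterGetSettingsRequest request = new ClusterGetSettingsRequest().includeDefaults(true);
        assert request.includeDefaults();

        // Response side: parse a (hypothetical) REST body with the new ConstructingObjectParser.
        String json = "{\"persistent\":{\"cluster.routing.allocation.enable\":\"none\"},"
            + "\"transient\":{},"
            + "\"defaults\":{\"cluster.routing.allocation.node_concurrent_recoveries\":\"2\"}}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            ClusterGetSettingsResponse response = ClusterGetSettingsResponse.fromXContent(parser);
            // Explicitly set values come from "persistent"/"transient"; "defaults" is only a fallback.
            System.out.println(response.getSetting("cluster.routing.allocation.enable"));
            System.out.println(response.getSetting("cluster.routing.allocation.node_concurrent_recoveries"));
        }
    }
}
```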
+ */ +public class ClusterGetSettingsResponse extends ActionResponse implements ToXContentObject { + + private Settings persistentSettings = Settings.EMPTY; + private Settings transientSettings = Settings.EMPTY; + private Settings defaultSettings = Settings.EMPTY; + + static final String PERSISTENT_FIELD = "persistent"; + static final String TRANSIENT_FIELD = "transient"; + static final String DEFAULTS_FIELD = "defaults"; + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser<ClusterGetSettingsResponse, Void> PARSER = + new ConstructingObjectParser<>( + "cluster_get_settings_response", + true, + a -> { + Settings defaultSettings = a[2] == null ? Settings.EMPTY : (Settings) a[2]; + return new ClusterGetSettingsResponse((Settings) a[0], (Settings) a[1], defaultSettings); + } + ); + static { + PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), new ParseField(PERSISTENT_FIELD)); + PARSER.declareObject(constructorArg(), (p, c) -> Settings.fromXContent(p), new ParseField(TRANSIENT_FIELD)); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> Settings.fromXContent(p), new ParseField(DEFAULTS_FIELD)); + } + + public ClusterGetSettingsResponse(Settings persistentSettings, Settings transientSettings, Settings defaultSettings) { + if (persistentSettings != null) { + this.persistentSettings = persistentSettings; + } + if (transientSettings != null) { + this.transientSettings = transientSettings; + } + if (defaultSettings != null) { + this.defaultSettings = defaultSettings; + } + } + + /** + * Returns the persistent settings for the cluster + * @return Settings + */ + public Settings getPersistentSettings() { + return persistentSettings; + } + + /** + * Returns the transient settings for the cluster + * @return Settings + */ + public Settings getTransientSettings() { + return transientSettings; + } + + /** + * Returns the default settings for the cluster (only if {@code include_defaults} was set to true in the request) + * @return Settings + */ + public Settings getDefaultSettings() { + return defaultSettings; + } + + /** + * Returns the string value of the given cluster setting. The order of search is first + * in persistent settings, then transient settings, and finally default settings.
+ * @param setting the name of the setting to get + * @return String + */ + public String getSetting(String setting) { + if (persistentSettings.hasValue(setting)) { + return persistentSettings.get(setting); + } else if (transientSettings.hasValue(setting)) { + return transientSettings.get(setting); + } else if (defaultSettings.hasValue(setting)) { + return defaultSettings.get(setting); + } else { + return null; + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startObject(PERSISTENT_FIELD); + persistentSettings.toXContent(builder, params); + builder.endObject(); + + builder.startObject(TRANSIENT_FIELD); + transientSettings.toXContent(builder, params); + builder.endObject(); + + if (defaultSettings.isEmpty() == false) { + builder.startObject(DEFAULTS_FIELD); + defaultSettings.toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + public static ClusterGetSettingsResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterGetSettingsResponse that = (ClusterGetSettingsResponse) o; + return Objects.equals(transientSettings, that.transientSettings) && + Objects.equals(persistentSettings, that.persistentSettings) && + Objects.equals(defaultSettings, that.defaultSettings); + } + + @Override + public int hashCode() { + return Objects.hash(transientSettings, persistentSettings, defaultSettings); + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java index d9c018848d7e8..09686025e9da9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java @@ -26,6 +26,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -42,7 +44,7 @@ * A request to analyze a text associated with a specific index. Allow to provide * the actual analyzer name to perform the analysis with. 
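The lookup order implemented by getSetting above (persistent, then transient, then defaults) can be pinned down in a few lines. This standalone sketch builds the response directly from the public constructor; the setting names are only illustrative:

```java
import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
import org.elasticsearch.common.settings.Settings;

public class SettingPrecedenceSketch {
    public static void main(String[] args) {
        Settings persistent = Settings.builder().put("indices.recovery.max_bytes_per_sec", "100mb").build();
        Settings transients = Settings.builder()
            .put("indices.recovery.max_bytes_per_sec", "50mb")   // shadowed: persistent is checked first
            .put("cluster.routing.allocation.enable", "none")
            .build();
        Settings defaults = Settings.builder().put("search.default_search_timeout", "-1").build();

        ClusterGetSettingsResponse response = new ClusterGetSettingsResponse(persistent, transients, defaults);

        // Lookup follows the order coded in getSetting: persistent, then transient, then defaults.
        assert "100mb".equals(response.getSetting("indices.recovery.max_bytes_per_sec"));
        assert "none".equals(response.getSetting("cluster.routing.allocation.enable"));
        assert "-1".equals(response.getSetting("search.default_search_timeout"));
        assert response.getSetting("no.such.setting") == null;
    }
}
```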
*/ -public class AnalyzeRequest extends SingleShardRequest { +public class AnalyzeRequest extends SingleShardRequest implements ToXContentObject { private String[] text; @@ -62,7 +64,7 @@ public class AnalyzeRequest extends SingleShardRequest { private String normalizer; - public static class NameOrDefinition implements Writeable { + public static class NameOrDefinition implements Writeable, ToXContentFragment { // exactly one of these two members is not null public final String name; public final Settings definition; @@ -102,6 +104,15 @@ public void writeTo(StreamOutput out) throws IOException { Settings.writeSettingsToStream(definition, out); } } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (definition == null) { + return builder.value(name); + } + return definition.toXContent(builder, params); + } + } public AnalyzeRequest() { @@ -171,6 +182,7 @@ public AnalyzeRequest addCharFilter(String charFilter) { this.charFilters.add(new NameOrDefinition(charFilter)); return this; } + public List charFilters() { return this.charFilters; } @@ -260,4 +272,36 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(normalizer); } } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("text", text); + if (Strings.isNullOrEmpty(analyzer) == false) { + builder.field("analyzer", analyzer); + } + if (tokenizer != null) { + tokenizer.toXContent(builder, params); + } + if (tokenFilters.size() > 0) { + builder.field("filter", tokenFilters); + } + if (charFilters.size() > 0) { + builder.field("char_filter", charFilters); + } + if (Strings.isNullOrEmpty(field) == false) { + builder.field("field", field); + } + if (explain) { + builder.field("explain", true); + } + if (attributes.length > 0) { + builder.field("attributes", attributes); + } + if (Strings.isNullOrEmpty(normalizer) == false) { + builder.field("normalizer", normalizer); + } + return builder.endObject(); + } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java index 1e54def2385f8..d45ab2682a5ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java @@ -20,17 +20,27 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; public class AnalyzeResponse extends 
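With AnalyzeRequest now implementing ToXContentObject, a caller can render the REST body directly instead of assembling JSON by hand. A minimal sketch (class name and sample text are invented):

```java
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class AnalyzeRequestBodySketch {
    public static void main(String[] args) throws Exception {
        AnalyzeRequest request = new AnalyzeRequest()
            .text("Quick Brown Foxes!")
            .analyzer("standard");
        request.explain(true);

        // The new toXContent produces the body a REST client can send, roughly:
        // {"text":["Quick Brown Foxes!"],"analyzer":"standard","explain":true}
        XContentBuilder builder = XContentFactory.jsonBuilder();
        request.toXContent(builder, ToXContent.EMPTY_PARAMS);
        System.out.println(Strings.toString(builder));
    }
}
```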
ActionResponse implements Iterable, ToXContentObject { @@ -46,6 +56,25 @@ public static class AnalyzeToken implements Streamable, ToXContentObject { AnalyzeToken() { } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeToken that = (AnalyzeToken) o; + return startOffset == that.startOffset && + endOffset == that.endOffset && + position == that.position && + positionLength == that.positionLength && + Objects.equals(term, that.term) && + Objects.equals(attributes, that.attributes) && + Objects.equals(type, that.type); + } + + @Override + public int hashCode() { + return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); + } + public AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, String type, Map attributes) { this.term = term; @@ -97,7 +126,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.POSITION_LENGTH, positionLength); } if (attributes != null && !attributes.isEmpty()) { - for (Map.Entry entity : attributes.entrySet()) { + Map sortedAttributes = new TreeMap<>(attributes); + for (Map.Entry entity : sortedAttributes.entrySet()) { builder.field(entity.getKey(), entity.getValue()); } } @@ -111,6 +141,50 @@ public static AnalyzeToken readAnalyzeToken(StreamInput in) throws IOException { return analyzeToken; } + public static AnalyzeToken fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + String field = null; + String term = ""; + int position = -1; + int startOffset = -1; + int endOffset = -1; + int positionLength = 1; + String type = ""; + Map attributes = new HashMap<>(); + for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) { + if (t == XContentParser.Token.FIELD_NAME) { + field = parser.currentName(); + continue; + } + if (Fields.TOKEN.equals(field)) { + term = parser.text(); + } else if (Fields.POSITION.equals(field)) { + position = parser.intValue(); + } else if (Fields.START_OFFSET.equals(field)) { + startOffset = parser.intValue(); + } else if (Fields.END_OFFSET.equals(field)) { + endOffset = parser.intValue(); + } else if (Fields.POSITION_LENGTH.equals(field)) { + positionLength = parser.intValue(); + } else if (Fields.TYPE.equals(field)) { + type = parser.text(); + } else { + if (t == XContentParser.Token.VALUE_STRING) { + attributes.put(field, parser.text()); + } else if (t == XContentParser.Token.VALUE_NUMBER) { + attributes.put(field, parser.numberValue()); + } else if (t == XContentParser.Token.VALUE_BOOLEAN) { + attributes.put(field, parser.booleanValue()); + } else if (t == XContentParser.Token.START_OBJECT) { + attributes.put(field, parser.map()); + } else if (t == XContentParser.Token.START_ARRAY) { + attributes.put(field, parser.list()); + } + } + } + return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes); + } + @Override public void readFrom(StreamInput in) throws IOException { term = in.readString(); @@ -125,8 +199,11 @@ public void readFrom(StreamInput in) throws IOException { positionLength = 1; } } + else { + positionLength = 1; + } type = in.readOptionalString(); - attributes = (Map) in.readGenericValue(); + attributes = in.readMap(); } @Override @@ -139,7 +216,7 @@ public void writeTo(StreamOutput out) throws IOException 
{ out.writeOptionalVInt(positionLength > 1 ? positionLength : null); } out.writeOptionalString(type); - out.writeGenericValue(attributes); + out.writeMapWithConsistentOrder(attributes); } } @@ -188,6 +265,17 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", + true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); + static { + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeToken.fromXContent(p), new ParseField(Fields.TOKENS)); + PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(Fields.DETAIL)); + } + + public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -196,6 +284,9 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { tokens.add(AnalyzeToken.readAnalyzeToken(in)); } + if (tokens.size() == 0) { + tokens = null; + } detail = in.readOptionalStreamable(DetailAnalyzeResponse::new); } @@ -213,6 +304,25 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalStreamable(detail); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeResponse that = (AnalyzeResponse) o; + return Objects.equals(detail, that.detail) && + Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(detail, tokens); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + static final class Fields { static final String TOKENS = "tokens"; static final String TOKEN = "token"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java index c080a01a98168..1e0c4ed525ef1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java @@ -20,20 +20,27 @@ package org.elasticsearch.action.admin.indices.analyze; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.lang.reflect.Array; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; -public class DetailAnalyzeResponse implements Streamable, ToXContentFragment { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; - DetailAnalyzeResponse() { - } +public class DetailAnalyzeResponse implements Streamable, ToXContentFragment { private boolean 
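A round-trip sketch showing how the new fromXContent and the added equals/hashCode fit together; the token values are arbitrary:

```java
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.util.Collections;

public class AnalyzeResponseRoundTripSketch {
    public static void main(String[] args) throws Exception {
        AnalyzeResponse.AnalyzeToken token = new AnalyzeResponse.AnalyzeToken(
            "quick", 0, 0, 5, 1, "<ALPHANUM>", Collections.singletonMap("keyword", false));
        AnalyzeResponse original = new AnalyzeResponse(Collections.singletonList(token), null);

        // Serialize with toXContent, parse back with the new ConstructingObjectParser ...
        String json = Strings.toString(original);
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            AnalyzeResponse parsed = AnalyzeResponse.fromXContent(parser);
            // ... and the new equals/hashCode make the round trip checkable.
            assert original.equals(parsed);
        }
    }
}
```

The attribute sorting added to toXContent (the TreeMap copy) serves the same goal: deterministic output, so serialized forms can be compared byte for byte in tests.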
customAnalyzer = false; private AnalyzeTokenList analyzer; @@ -41,6 +48,9 @@ public class DetailAnalyzeResponse implements Streamable, ToXContentFragment { private AnalyzeTokenList tokenizer; private AnalyzeTokenList[] tokenfilters; + DetailAnalyzeResponse() { + } + public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { this(false, analyzer, null, null, null); } @@ -66,6 +76,7 @@ public AnalyzeTokenList analyzer() { } public DetailAnalyzeResponse analyzer(AnalyzeTokenList analyzer) { + this.customAnalyzer = false; this.analyzer = analyzer; return this; } @@ -75,6 +86,7 @@ public CharFilteredText[] charfilters() { } public DetailAnalyzeResponse charfilters(CharFilteredText[] charfilters) { + this.customAnalyzer = true; this.charfilters = charfilters; return this; } @@ -84,6 +96,7 @@ public AnalyzeTokenList tokenizer() { } public DetailAnalyzeResponse tokenizer(AnalyzeTokenList tokenizer) { + this.customAnalyzer = true; this.tokenizer = tokenizer; return this; } @@ -93,10 +106,31 @@ public AnalyzeTokenList[] tokenfilters() { } public DetailAnalyzeResponse tokenfilters(AnalyzeTokenList[] tokenfilters) { + this.customAnalyzer = true; this.tokenfilters = tokenfilters; return this; } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; + return customAnalyzer == that.customAnalyzer && + Objects.equals(analyzer, that.analyzer) && + Arrays.equals(charfilters, that.charfilters) && + Objects.equals(tokenizer, that.tokenizer) && + Arrays.equals(tokenfilters, that.tokenfilters); + } + + @Override + public int hashCode() { + int result = Objects.hash(customAnalyzer, analyzer, tokenizer); + result = 31 * result + Arrays.hashCode(charfilters); + result = 31 * result + Arrays.hashCode(tokenfilters); + return result; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer); @@ -131,6 +165,32 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @SuppressWarnings("unchecked") + private static T[] fromList(Class clazz, List list) { + if (list == null) { + return null; + } + return list.toArray((T[])Array.newInstance(clazz, 0)); + } + + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", + true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], + fromList(CharFilteredText.class, (List)args[2]), + (AnalyzeTokenList) args[3], + fromList(AnalyzeTokenList.class, (List)args[4]))); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER)); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER)); + PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS)); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER)); + PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS)); + } + + public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + static final class Fields { static final String NAME = "name"; static final String FILTERED_TEXT = "filtered_text"; @@ -195,6 +255,22 @@ public static class AnalyzeTokenList implements 
Streamable, ToXContentObject { private String name; private AnalyzeResponse.AnalyzeToken[] tokens; + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeTokenList that = (AnalyzeTokenList) o; + return Objects.equals(name, that.name) && + Arrays.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(tokens); + return result; + } + AnalyzeTokenList() { } @@ -235,6 +311,20 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", + true, args -> new AnalyzeTokenList((String) args[0], + fromList(AnalyzeResponse.AnalyzeToken.class, (List)args[1]))); + + static { + PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); + PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), + new ParseField(AnalyzeResponse.Fields.TOKENS)); + } + + public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public void readFrom(StreamInput in) throws IOException { name = in.readString(); @@ -264,6 +354,7 @@ public void writeTo(StreamOutput out) throws IOException { public static class CharFilteredText implements Streamable, ToXContentObject { private String name; private String[] texts; + CharFilteredText() { } @@ -293,6 +384,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", + true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); + + static { + PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); + PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT)); + } + + public static CharFilteredText fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + public static CharFilteredText readCharFilteredText(StreamInput in) throws IOException { CharFilteredText text = new CharFilteredText(); text.readFrom(in); @@ -310,5 +413,21 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeStringArray(texts); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CharFilteredText that = (CharFilteredText) o; + return Objects.equals(name, that.name) && + Arrays.equals(texts, that.texts); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(texts); + return result; + } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java index 1f1a7595ed46b..b09343d004d76 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexRequest.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.get; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import 
org.elasticsearch.action.support.master.info.ClusterInfoRequest; import org.elasticsearch.common.io.stream.StreamInput; @@ -105,8 +106,7 @@ public boolean humanReadable() { /** * Sets the value of "include_defaults". - * Used only by the high-level REST client. - * + * * @param includeDefaults value of "include_defaults" to be set. * @return this request */ @@ -117,8 +117,7 @@ public GetIndexRequest includeDefaults(boolean includeDefaults) { /** * Whether to return all default settings for each of the indices. - * Used only by the high-level REST client. - * + * * @return true if defaults settings for each of the indices need to returned; * false otherwise. */ @@ -135,6 +134,9 @@ public void readFrom(StreamInput in) throws IOException { features[i] = Feature.fromId(in.readByte()); } humanReadable = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { + includeDefaults = in.readBoolean(); + } } @Override @@ -145,6 +147,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeByte(feature.id); } out.writeBoolean(humanReadable); + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { + out.writeBoolean(includeDefaults); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java index 36bfa81a33416..e2b72077b7f21 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexResponse.java @@ -20,33 +20,50 @@ package org.elasticsearch.action.admin.indices.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; /** - * A response for a delete index action. + * A response for a get index action. 
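Because the new include_defaults flag is only written to the wire from V_6_4_0 onwards, it silently degrades against older nodes. A test-style sketch of that behaviour (sketch class and wire versions chosen for illustration):

```java
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

public class IncludeDefaultsWireSketch {
    static GetIndexRequest roundTrip(GetIndexRequest request, Version wireVersion) throws Exception {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.setVersion(wireVersion);
            request.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                in.setVersion(wireVersion);
                GetIndexRequest read = new GetIndexRequest();
                read.readFrom(in);
                return read;
            }
        }
    }

    public static void main(String[] args) throws Exception {
        GetIndexRequest request = new GetIndexRequest().indices("docs").includeDefaults(true);
        // On a >= 6.4.0 wire the flag survives; on an older wire it silently drops back to false.
        assert roundTrip(request, Version.V_6_4_0).includeDefaults();
        assert roundTrip(request, Version.V_6_3_0).includeDefaults() == false;
    }
}
```

The matching version check in readFrom keeps older peers from misreading the rest of the stream.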
*/ -public class GetIndexResponse extends ActionResponse { +public class GetIndexResponse extends ActionResponse implements ToXContentObject { private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); private ImmutableOpenMap> aliases = ImmutableOpenMap.of(); private ImmutableOpenMap settings = ImmutableOpenMap.of(); + private ImmutableOpenMap defaultSettings = ImmutableOpenMap.of(); private String[] indices; GetIndexResponse(String[] indices, - ImmutableOpenMap> mappings, - ImmutableOpenMap> aliases, ImmutableOpenMap settings) { + ImmutableOpenMap> mappings, + ImmutableOpenMap> aliases, + ImmutableOpenMap settings, + ImmutableOpenMap defaultSettings) { this.indices = indices; + // to have deterministic order + Arrays.sort(indices); if (mappings != null) { this.mappings = mappings; } @@ -56,6 +73,9 @@ public class GetIndexResponse extends ActionResponse { if (settings != null) { this.settings = settings; } + if (defaultSettings != null) { + this.defaultSettings = defaultSettings; + } } GetIndexResponse() { @@ -89,14 +109,51 @@ public ImmutableOpenMap settings() { return settings; } + /** + * If the originating {@link GetIndexRequest} object was configured to include + * defaults, this will contain a mapping of index name to {@link Settings} objects. + * The returned {@link Settings} objects will contain only those settings taking + * effect as defaults. Any settings explicitly set on the index will be available + * via {@link #settings()}. + * See also {@link GetIndexRequest#includeDefaults(boolean)} + */ + public ImmutableOpenMap defaultSettings() { + return defaultSettings; + } + public ImmutableOpenMap getSettings() { return settings(); } + /** + * Returns the string value for the specified index and setting. If the includeDefaults flag was not set or set to + * false on the {@link GetIndexRequest}, this method will only return a value where the setting was explicitly set + * on the index. If the includeDefaults flag was set to true on the {@link GetIndexRequest}, this method will fall + * back to return the default value if the setting was not explicitly set. 
+ */ + public String getSetting(String index, String setting) { + Settings indexSettings = settings.get(index); + if (setting != null) { + if (indexSettings != null && indexSettings.hasValue(setting)) { + return indexSettings.get(setting); + } else { + Settings defaultIndexSettings = defaultSettings.get(index); + if (defaultIndexSettings != null) { + return defaultIndexSettings.get(setting); + } else { + return null; + } + } + } else { + return null; + } + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); this.indices = in.readStringArray(); + int mappingsSize = in.readVInt(); ImmutableOpenMap.Builder> mappingsMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < mappingsSize; i++) { @@ -109,6 +166,7 @@ public void readFrom(StreamInput in) throws IOException { mappingsMapBuilder.put(key, mappingEntryBuilder.build()); } mappings = mappingsMapBuilder.build(); + int aliasesSize = in.readVInt(); ImmutableOpenMap.Builder> aliasesMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < aliasesSize; i++) { @@ -121,6 +179,7 @@ public void readFrom(StreamInput in) throws IOException { aliasesMapBuilder.put(key, Collections.unmodifiableList(aliasEntryBuilder)); } aliases = aliasesMapBuilder.build(); + int settingsSize = in.readVInt(); ImmutableOpenMap.Builder settingsMapBuilder = ImmutableOpenMap.builder(); for (int i = 0; i < settingsSize; i++) { @@ -128,6 +187,15 @@ public void readFrom(StreamInput in) throws IOException { settingsMapBuilder.put(key, Settings.readSettingsFromStream(in)); } settings = settingsMapBuilder.build(); + + ImmutableOpenMap.Builder defaultSettingsMapBuilder = ImmutableOpenMap.builder(); + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { + int defaultSettingsSize = in.readVInt(); + for (int i = 0; i < defaultSettingsSize ; i++) { + defaultSettingsMapBuilder.put(in.readString(), Settings.readSettingsFromStream(in)); + } + } + defaultSettings = defaultSettingsMapBuilder.build(); } @Override @@ -156,5 +224,202 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(indexEntry.key); Settings.writeSettingsToStream(indexEntry.value, out); } + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { + out.writeVInt(defaultSettings.size()); + for (ObjectObjectCursor indexEntry : defaultSettings) { + out.writeString(indexEntry.key); + Settings.writeSettingsToStream(indexEntry.value, out); + } + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + for (final String index : indices) { + builder.startObject(index); + { + builder.startObject("aliases"); + List indexAliases = aliases.get(index); + if (indexAliases != null) { + for (final AliasMetaData alias : indexAliases) { + AliasMetaData.Builder.toXContent(alias, builder, params); + } + } + builder.endObject(); + + builder.startObject("mappings"); + ImmutableOpenMap indexMappings = mappings.get(index); + if (indexMappings != null) { + for (final ObjectObjectCursor typeEntry : indexMappings) { + builder.field(typeEntry.key); + builder.map(typeEntry.value.sourceAsMap()); + } + } + builder.endObject(); + + builder.startObject("settings"); + Settings indexSettings = settings.get(index); + if (indexSettings != null) { + indexSettings.toXContent(builder, params); + } + builder.endObject(); + + Settings defaultIndexSettings = defaultSettings.get(index); + if (defaultIndexSettings != null && defaultIndexSettings.isEmpty() == false) { + builder.startObject("defaults"); + 
defaultIndexSettings.toXContent(builder, params); + builder.endObject(); + } + } + builder.endObject(); + } + } + builder.endObject(); + return builder; + } + + private static List parseAliases(XContentParser parser) throws IOException { + List indexAliases = new ArrayList<>(); + // We start at START_OBJECT since parseIndexEntry ensures that + while (parser.nextToken() != Token.END_OBJECT) { + ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + indexAliases.add(AliasMetaData.Builder.fromXContent(parser)); + } + return indexAliases; + } + + private static ImmutableOpenMap parseMappings(XContentParser parser) throws IOException { + ImmutableOpenMap.Builder indexMappings = ImmutableOpenMap.builder(); + // We start at START_OBJECT since parseIndexEntry ensures that + while (parser.nextToken() != Token.END_OBJECT) { + ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + parser.nextToken(); + if (parser.currentToken() == Token.START_OBJECT) { + String mappingType = parser.currentName(); + indexMappings.put(mappingType, new MappingMetaData(mappingType, parser.map())); + } else if (parser.currentToken() == Token.START_ARRAY) { + parser.skipChildren(); + } + } + return indexMappings.build(); + } + + private static IndexEntry parseIndexEntry(XContentParser parser) throws IOException { + List indexAliases = null; + ImmutableOpenMap indexMappings = null; + Settings indexSettings = null; + Settings indexDefaultSettings = null; + // We start at START_OBJECT since fromXContent ensures that + while (parser.nextToken() != Token.END_OBJECT) { + ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + parser.nextToken(); + if (parser.currentToken() == Token.START_OBJECT) { + switch (parser.currentName()) { + case "aliases": + indexAliases = parseAliases(parser); + break; + case "mappings": + indexMappings = parseMappings(parser); + break; + case "settings": + indexSettings = Settings.fromXContent(parser); + break; + case "defaults": + indexDefaultSettings = Settings.fromXContent(parser); + break; + default: + parser.skipChildren(); + } + } else if (parser.currentToken() == Token.START_ARRAY) { + parser.skipChildren(); + } + } + return new IndexEntry(indexAliases, indexMappings, indexSettings, indexDefaultSettings); + } + + // This is just an internal container to make stuff easier for returning + private static class IndexEntry { + List indexAliases = new ArrayList<>(); + ImmutableOpenMap indexMappings = ImmutableOpenMap.of(); + Settings indexSettings = Settings.EMPTY; + Settings indexDefaultSettings = Settings.EMPTY; + IndexEntry(List indexAliases, ImmutableOpenMap indexMappings, + Settings indexSettings, Settings indexDefaultSettings) { + if (indexAliases != null) this.indexAliases = indexAliases; + if (indexMappings != null) this.indexMappings = indexMappings; + if (indexSettings != null) this.indexSettings = indexSettings; + if (indexDefaultSettings != null) this.indexDefaultSettings = indexDefaultSettings; + } + } + + public static GetIndexResponse fromXContent(XContentParser parser) throws IOException { + ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); + List indices = new ArrayList<>(); + + if (parser.currentToken() == null) { + parser.nextToken(); + } + 
ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + parser.nextToken(); + + while (!parser.isClosed()) { + if (parser.currentToken() == Token.START_OBJECT) { + // we assume this is an index entry + String indexName = parser.currentName(); + indices.add(indexName); + IndexEntry indexEntry = parseIndexEntry(parser); + // make the order deterministic + CollectionUtil.timSort(indexEntry.indexAliases, Comparator.comparing(AliasMetaData::alias)); + aliases.put(indexName, Collections.unmodifiableList(indexEntry.indexAliases)); + mappings.put(indexName, indexEntry.indexMappings); + settings.put(indexName, indexEntry.indexSettings); + if (indexEntry.indexDefaultSettings.isEmpty() == false) { + defaultSettings.put(indexName, indexEntry.indexDefaultSettings); + } + } else if (parser.currentToken() == Token.START_ARRAY) { + parser.skipChildren(); + } else { + parser.nextToken(); + } + } + return + new GetIndexResponse( + indices.toArray(new String[0]), mappings.build(), aliases.build(), + settings.build(), defaultSettings.build() + ); + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o== null || getClass() != o.getClass()) return false; + GetIndexResponse that = (GetIndexResponse) o; + return Arrays.equals(indices, that.indices) && + Objects.equals(aliases, that.aliases) && + Objects.equals(mappings, that.mappings) && + Objects.equals(settings, that.settings) && + Objects.equals(defaultSettings, that.defaultSettings); + } + + @Override + public int hashCode() { + return + Objects.hash( + Arrays.hashCode(indices), + aliases, + mappings, + settings, + defaultSettings + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java index abcd47643b1bb..bf08c2d51015c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java @@ -36,9 +36,11 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.common.settings.IndexScopedSettings; import java.io.IOException; import java.util.List; @@ -49,14 +51,19 @@ public class TransportGetIndexAction extends TransportClusterInfoAction { private final IndicesService indicesService; + private final IndexScopedSettings indexScopedSettings; + private final SettingsFilter settingsFilter; @Inject public TransportGetIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService) { + ThreadPool threadPool, SettingsFilter settingsFilter, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService, + IndexScopedSettings indexScopedSettings) { super(settings, GetIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, 
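The new parser and the settings fallback in GetIndexResponse combine as follows; the JSON body here is a trimmed, hypothetical GET index response:

```java
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

public class GetIndexResponseSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"docs\":{"
            + "\"aliases\":{},"
            + "\"mappings\":{},"
            + "\"settings\":{\"index\":{\"number_of_shards\":\"1\"}},"
            + "\"defaults\":{\"index\":{\"refresh_interval\":\"1s\"}}}}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            GetIndexResponse response = GetIndexResponse.fromXContent(parser);
            // Explicit index setting, read straight from the "settings" section:
            assert "1".equals(response.getSetting("docs", "index.number_of_shards"));
            // Not set explicitly, so getSetting falls back to the parsed "defaults" section:
            assert "1s".equals(response.getSetting("docs", "index.refresh_interval"));
        }
    }
}
```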
GetIndexRequest::new); this.indicesService = indicesService; + this.settingsFilter = settingsFilter; + this.indexScopedSettings = indexScopedSettings; } @Override @@ -82,6 +89,7 @@ protected void doMasterOperation(final GetIndexRequest request, String[] concret ImmutableOpenMap> mappingsResult = ImmutableOpenMap.of(); ImmutableOpenMap> aliasesResult = ImmutableOpenMap.of(); ImmutableOpenMap settings = ImmutableOpenMap.of(); + ImmutableOpenMap defaultSettings = ImmutableOpenMap.of(); Feature[] features = request.features(); boolean doneAliases = false; boolean doneMappings = false; @@ -109,14 +117,21 @@ protected void doMasterOperation(final GetIndexRequest request, String[] concret case SETTINGS: if (!doneSettings) { ImmutableOpenMap.Builder settingsMapBuilder = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder defaultSettingsMapBuilder = ImmutableOpenMap.builder(); for (String index : concreteIndices) { Settings indexSettings = state.metaData().index(index).getSettings(); if (request.humanReadable()) { indexSettings = IndexMetaData.addHumanReadableSettings(indexSettings); } settingsMapBuilder.put(index, indexSettings); + if (request.includeDefaults()) { + Settings defaultIndexSettings = + settingsFilter.filter(indexScopedSettings.diff(indexSettings, Settings.EMPTY)); + defaultSettingsMapBuilder.put(index, defaultIndexSettings); + } } settings = settingsMapBuilder.build(); + defaultSettings = defaultSettingsMapBuilder.build(); doneSettings = true; } break; @@ -125,6 +140,8 @@ protected void doMasterOperation(final GetIndexRequest request, String[] concret throw new IllegalStateException("feature [" + feature + "] is not valid"); } } - listener.onResponse(new GetIndexResponse(concreteIndices, mappingsResult, aliasesResult, settings)); + listener.onResponse( + new GetIndexResponse(concreteIndices, mappingsResult, aliasesResult, settings, defaultSettings) + ); } } diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java index db185f1e8c11c..2ecce44b55c1e 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/BlobContainer.java @@ -69,16 +69,18 @@ public interface BlobContainer { * @param blobSize * The size of the blob to be written, in bytes. It is implementation dependent whether * this value is used in writing the blob to the repository. - * @throws FileAlreadyExistsException if a blob by the same name already exists + * @param failIfAlreadyExists + * whether to throw a FileAlreadyExistsException if the given blob already exists + * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ - void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException; + void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException; /** * Reads blob content from the input stream and writes it to the container in a new blob with the given name, * using an atomic write operation if the implementation supports it. When the BlobContainer implementation * does not provide a specific implementation of writeBlobAtomic(String, InputStream, long), then - * the {@link #writeBlob(String, InputStream, long)} method is used. 
+ * the {@link #writeBlob(String, InputStream, long, boolean)} method is used. * * This method assumes the container does not already contain a blob of the same blobName. If a blob by the * same name already exists, the operation will fail and an {@link IOException} will be thrown. @@ -90,11 +92,14 @@ public interface BlobContainer { * @param blobSize * The size of the blob to be written, in bytes. It is implementation dependent whether * this value is used in writing the blob to the repository. - * @throws FileAlreadyExistsException if a blob by the same name already exists + * @param failIfAlreadyExists + * whether to throw a FileAlreadyExistsException if the given blob already exists + * @throws FileAlreadyExistsException if failIfAlreadyExists is true and a blob by the same name already exists * @throws IOException if the input stream could not be read, or the target blob could not be written to. */ - default void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize) throws IOException { - writeBlob(blobName, inputStream, blobSize); + default void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, boolean failIfAlreadyExists) + throws IOException { + writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); } /** diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java index a58802ecd1828..bab984bd85c74 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/fs/FsBlobContainer.java @@ -124,7 +124,10 @@ public InputStream readBlob(String name) throws IOException { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + if (failIfAlreadyExists == false) { + deleteBlobIgnoringIfNotExists(blobName); + } final Path file = path.resolve(blobName); try (OutputStream outputStream = Files.newOutputStream(file, StandardOpenOption.CREATE_NEW)) { Streams.copy(inputStream, outputStream); @@ -134,7 +137,8 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize) t } @Override - public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize) throws IOException { + public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, boolean failIfAlreadyExists) + throws IOException { final String tempBlob = tempBlobName(blobName); final Path tempBlobPath = path.resolve(tempBlob); try { @@ -142,7 +146,7 @@ public void writeBlobAtomic(final String blobName, final InputStream inputStream Streams.copy(inputStream, outputStream); } IOUtils.fsync(tempBlobPath, false); - moveBlobAtomic(tempBlob, blobName); + moveBlobAtomic(tempBlob, blobName, failIfAlreadyExists); } catch (IOException ex) { try { deleteBlobIgnoringIfNotExists(tempBlob); @@ -155,13 +159,18 @@ public void writeBlobAtomic(final String blobName, final InputStream inputStream } } - public void moveBlobAtomic(final String sourceBlobName, final String targetBlobName) throws IOException { + public void moveBlobAtomic(final String sourceBlobName, final String targetBlobName, final boolean failIfAlreadyExists) + throws IOException { final Path sourceBlobPath = 
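The new failIfAlreadyExists parameter turns the old implicit "never overwrite" contract into an explicit per-call choice. A small caller-side sketch (the helper names are invented):

```java
import org.elasticsearch.common.blobstore.BlobContainer;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class BlobWriteSketch {
    /** Create-only write: a concurrent writer racing on the same name surfaces as FileAlreadyExistsException. */
    static void writeNew(BlobContainer container, String name, byte[] data) throws IOException {
        try (InputStream in = new ByteArrayInputStream(data)) {
            container.writeBlob(name, in, data.length, true);
        }
    }

    /** Overwrite-allowed write: the pattern the repository layer now uses for blobs like index.latest. */
    static void writeOrReplace(BlobContainer container, String name, byte[] data) throws IOException {
        try (InputStream in = new ByteArrayInputStream(data)) {
            container.writeBlob(name, in, data.length, false);
        }
    }
}
```

Pushing the decision into the container lets implementations overwrite atomically where the store supports it, instead of the racy delete-then-write sequence this PR removes from BlobStoreRepository.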
path.resolve(sourceBlobName); final Path targetBlobPath = path.resolve(targetBlobName); // If the target file exists then Files.move() behaviour is implementation specific // the existing file might be replaced or this method fails by throwing an IOException. if (Files.exists(targetBlobPath)) { - throw new FileAlreadyExistsException("blob [" + targetBlobPath + "] already exists, cannot overwrite"); + if (failIfAlreadyExists) { + throw new FileAlreadyExistsException("blob [" + targetBlobPath + "] already exists, cannot overwrite"); + } else { + deleteBlobIgnoringIfNotExists(targetBlobName); + } } Files.move(sourceBlobPath, targetBlobPath, StandardCopyOption.ATOMIC_MOVE); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsException.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsException.java index f7d4843c1c03e..ad5f56d7fc0f3 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsException.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsException.java @@ -42,4 +42,8 @@ public SettingsException(String message, Throwable cause) { public SettingsException(StreamInput in) throws IOException { super(in); } + + public SettingsException(String msg, Object... args) { + super(msg, args); + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 0e4eae80818ef..a9e870f1af9bc 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -394,7 +394,8 @@ public static GeoBoundingBoxQueryBuilder fromXContent(XContentParser parser) thr GeoValidationMethod validationMethod = null; boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - Rectangle bbox = null; + // bottom (minLat), top (maxLat), left (minLon), right (maxLon) + double[] bbox = null; String type = "memory"; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -429,8 +430,8 @@ public static GeoBoundingBoxQueryBuilder fromXContent(XContentParser parser) thr throw new ElasticsearchParseException("failed to parse [{}] query. bounding box not provided", NAME); } - final GeoPoint topLeft = new GeoPoint(bbox.maxLat, bbox.minLon); //just keep the object - final GeoPoint bottomRight = new GeoPoint(bbox.minLat, bbox.maxLon); + final GeoPoint topLeft = new GeoPoint(bbox[1], bbox[2]); + final GeoPoint bottomRight = new GeoPoint(bbox[0], bbox[3]); GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName); builder.setCorners(topLeft, bottomRight); @@ -465,7 +466,10 @@ public String getWriteableName() { return NAME; } - public static Rectangle parseBoundingBox(XContentParser parser) throws IOException, ElasticsearchParseException { + /** + * Parses the bounding box and returns bottom, top, left, right coordinates + */ + public static double[] parseBoundingBox(XContentParser parser) throws IOException, ElasticsearchParseException { XContentParser.Token token = parser.currentToken(); if (token != XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("failed to parse bounding box. 
Expected start object but found [{}]", token); @@ -527,8 +531,8 @@ public static Rectangle parseBoundingBox(XContentParser parser) throws IOExcepti + "using well-known text and explicit corners."); } org.locationtech.spatial4j.shape.Rectangle r = envelope.build(); - return new Rectangle(r.getMinY(), r.getMaxY(), r.getMinX(), r.getMaxX()); + return new double[]{r.getMinY(), r.getMaxY(), r.getMinX(), r.getMaxX()}; } - return new Rectangle(bottom, top, left, right); + return new double[]{bottom, top, left, right}; } } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index caa99bd9572d0..4fe9747285141 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -556,7 +556,7 @@ public String startVerification() { String blobName = "master.dat"; BytesArray bytes = new BytesArray(testBytes); try (InputStream stream = bytes.streamInput()) { - testContainer.writeBlobAtomic(blobName, stream, bytes.length()); + testContainer.writeBlobAtomic(blobName, stream, bytes.length(), true); } return seed; } @@ -664,7 +664,7 @@ protected void writeIndexGen(final RepositoryData repositoryData, final long rep // write the index file final String indexBlob = INDEX_FILE_PREFIX + Long.toString(newGen); logger.debug("Repository [{}] writing new index generational blob [{}]", metadata.name(), indexBlob); - writeAtomic(indexBlob, snapshotsBytes); + writeAtomic(indexBlob, snapshotsBytes, true); // delete the N-2 index file if it exists, keep the previous one around as a backup if (isReadOnly() == false && newGen - 2 >= 0) { final String oldSnapshotIndexFile = INDEX_FILE_PREFIX + Long.toString(newGen - 2); @@ -677,9 +677,8 @@ protected void writeIndexGen(final RepositoryData repositoryData, final long rep bStream.writeLong(newGen); genBytes = bStream.bytes(); } - snapshotsBlobContainer.deleteBlobIgnoringIfNotExists(INDEX_LATEST_BLOB); logger.debug("Repository [{}] updating index.latest with generation [{}]", metadata.name(), newGen); - writeAtomic(INDEX_LATEST_BLOB, genBytes); + writeAtomic(INDEX_LATEST_BLOB, genBytes, false); } /** @@ -698,9 +697,8 @@ void writeIncompatibleSnapshots(RepositoryData repositoryData) throws IOExceptio } bytes = bStream.bytes(); } - snapshotsBlobContainer.deleteBlobIgnoringIfNotExists(INCOMPATIBLE_SNAPSHOTS_BLOB); // write the incompatible snapshots blob - writeAtomic(INCOMPATIBLE_SNAPSHOTS_BLOB, bytes); + writeAtomic(INCOMPATIBLE_SNAPSHOTS_BLOB, bytes, false); } /** @@ -766,9 +764,9 @@ private long listBlobsToGetLatestIndexId() throws IOException { return latest; } - private void writeAtomic(final String blobName, final BytesReference bytesRef) throws IOException { + private void writeAtomic(final String blobName, final BytesReference bytesRef, boolean failIfAlreadyExists) throws IOException { try (InputStream stream = bytesRef.streamInput()) { - snapshotsBlobContainer.writeBlobAtomic(blobName, stream, bytesRef.length()); + snapshotsBlobContainer.writeBlobAtomic(blobName, stream, bytesRef.length(), failIfAlreadyExists); } } @@ -813,7 +811,7 @@ public void verify(String seed, DiscoveryNode localNode) { try { BytesArray bytes = new BytesArray(seed); try (InputStream stream = bytes.streamInput()) { - testBlobContainer.writeBlob("data-" + localNode.getId() + ".dat", stream, bytes.length()); + 
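One note on the GeoBoundingBoxQueryBuilder change above: with Rectangle gone, the double[] index order (bottom, top, left, right) is load-bearing. This sketch consumes the array exactly the way the updated fromXContent does, with invented coordinates:

```java
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;

public class BoundingBoxSketch {
    public static void main(String[] args) {
        // parseBoundingBox(...) returns { bottom (minLat), top (maxLat), left (minLon), right (maxLon) }
        double[] bbox = new double[]{40.01, 40.73, -74.1, -71.12};

        GeoPoint topLeft = new GeoPoint(bbox[1], bbox[2]);     // maxLat, minLon
        GeoPoint bottomRight = new GeoPoint(bbox[0], bbox[3]); // minLat, maxLon

        GeoBoundingBoxQueryBuilder query = new GeoBoundingBoxQueryBuilder("pin.location")
            .setCorners(topLeft, bottomRight);
        System.out.println(query);
    }
}
```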
testBlobContainer.writeBlob("data-" + localNode.getId() + ".dat", stream, bytes.length(), true); } } catch (IOException exp) { throw new RepositoryVerificationException(metadata.name(), "store location [" + blobStore() + "] is not accessible on the node [" + localNode + "]", exp); @@ -1254,7 +1252,7 @@ private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo) t snapshotRateLimitingTimeInNanos::inc); } inputStream = new AbortableInputStream(inputStream, fileInfo.physicalName()); - blobContainer.writeBlob(fileInfo.partName(i), inputStream, partBytes); + blobContainer.writeBlob(fileInfo.partName(i), inputStream, partBytes, true); } Store.verify(indexInput); snapshotStatus.addProcessedFile(fileInfo.length()); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index b974be2b869ab..ca6ec74dc2ce2 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -132,7 +132,7 @@ public void writeAtomic(T obj, BlobContainer blobContainer, String name) throws final String blobName = blobName(name); writeTo(obj, blobName, bytesArray -> { try (InputStream stream = bytesArray.streamInput()) { - blobContainer.writeBlobAtomic(blobName, stream, bytesArray.length()); + blobContainer.writeBlobAtomic(blobName, stream, bytesArray.length(), true); } }); } @@ -150,7 +150,7 @@ public void write(T obj, BlobContainer blobContainer, String name) throws IOExce final String blobName = blobName(name); writeTo(obj, blobName, bytesArray -> { try (InputStream stream = bytesArray.streamInput()) { - blobContainer.writeBlob(blobName, stream, bytesArray.length()); + blobContainer.writeBlob(blobName, stream, bytesArray.length(), true); } }); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index f9716d8d1bade..b452b62eb5e95 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest.action.admin.cluster; +import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.client.Requests; @@ -65,6 +66,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC .nodes(false); final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local())); + clusterStateRequest.masterNodeTimeout(request.paramAsTime("master_timeout", clusterStateRequest.masterNodeTimeout())); return channel -> client.admin().cluster().state(clusterStateRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(ClusterStateResponse response, XContentBuilder builder) throws Exception { @@ -85,23 +87,13 @@ public boolean canTripCircuitBreaker() { private XContentBuilder renderResponse(ClusterState state, boolean renderDefaults, XContentBuilder builder, 
ToXContent.Params params) throws IOException { - builder.startObject(); - - builder.startObject("persistent"); - state.metaData().persistentSettings().toXContent(builder, params); - builder.endObject(); - - builder.startObject("transient"); - state.metaData().transientSettings().toXContent(builder, params); - builder.endObject(); - - if (renderDefaults) { - builder.startObject("defaults"); - settingsFilter.filter(clusterSettings.diff(state.metaData().settings(), this.settings)).toXContent(builder, params); - builder.endObject(); - } - - builder.endObject(); - return builder; + return + new ClusterGetSettingsResponse( + state.metaData().persistentSettings(), + state.metaData().transientSettings(), + renderDefaults ? + settingsFilter.filter(clusterSettings.diff(state.metaData().settings(), this.settings)) : + Settings.EMPTY + ).toXContent(builder, params); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index e9552d4752685..04fae0f30f6bf 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -19,55 +19,35 @@ package org.elasticsearch.rest.action.admin.indices; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; -import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.metadata.AliasMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.common.xcontent.ToXContent.Params; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.List; import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; -import static org.elasticsearch.rest.RestStatus.OK; /** * The REST handler for get index and head index APIs. 
*/ public class RestGetIndicesAction extends BaseRestHandler { - private final IndexScopedSettings indexScopedSettings; - private final SettingsFilter settingsFilter; public RestGetIndicesAction( final Settings settings, - final RestController controller, - final IndexScopedSettings indexScopedSettings, - final SettingsFilter settingsFilter) { + final RestController controller) { super(settings); - this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}", this); controller.registerHandler(HEAD, "/{index}", this); - this.settingsFilter = settingsFilter; } @Override @@ -82,93 +62,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getIndexRequest.indices(indices); getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local())); + getIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexRequest.masterNodeTimeout())); getIndexRequest.humanReadable(request.paramAsBoolean("human", false)); - final boolean defaults = request.paramAsBoolean("include_defaults", false); - return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener(channel) { - - @Override - public RestResponse buildResponse(final GetIndexResponse response, final XContentBuilder builder) throws Exception { - builder.startObject(); - { - for (final String index : response.indices()) { - builder.startObject(index); - { - for (final Feature feature : getIndexRequest.features()) { - switch (feature) { - case ALIASES: - writeAliases(response.aliases().get(index), builder, request); - break; - case MAPPINGS: - writeMappings(response.mappings().get(index), builder); - break; - case SETTINGS: - writeSettings(response.settings().get(index), builder, request, defaults); - break; - default: - throw new IllegalStateException("feature [" + feature + "] is not valid"); - } - } - } - builder.endObject(); - - } - } - builder.endObject(); - - return new BytesRestResponse(OK, builder); - } - - private void writeAliases( - final List aliases, - final XContentBuilder builder, - final Params params) throws IOException { - builder.startObject("aliases"); - { - if (aliases != null) { - for (final AliasMetaData alias : aliases) { - AliasMetaData.Builder.toXContent(alias, builder, params); - } - } - } - builder.endObject(); - } - - private void writeMappings(final ImmutableOpenMap mappings, final XContentBuilder builder) - throws IOException { - builder.startObject("mappings"); - { - if (mappings != null) { - for (final ObjectObjectCursor typeEntry : mappings) { - builder.field(typeEntry.key); - builder.map(typeEntry.value.sourceAsMap()); - } - } - } - builder.endObject(); - } - - private void writeSettings( - final Settings settings, - final XContentBuilder builder, - final Params params, - final boolean defaults) throws IOException { - builder.startObject("settings"); - { - settings.toXContent(builder, params); - } - builder.endObject(); - if (defaults) { - builder.startObject("defaults"); - { - settingsFilter - .filter(indexScopedSettings.diff(settings, RestGetIndicesAction.this.settings)) - .toXContent(builder, request); - } - builder.endObject(); - } - } - - }); + getIndexRequest.includeDefaults(request.paramAsBoolean("include_defaults", false)); + return channel -> client.admin().indices().getIndex(getIndexRequest, new RestToXContentListener<>(channel)); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
index 7074d3ad9fe44..f0e075eac7d93 100644
--- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -53,7 +53,11 @@ public class ScriptModule {
             SimilarityScript.CONTEXT,
             SimilarityWeightScript.CONTEXT,
             TemplateScript.CONTEXT,
-            MovingFunctionScript.CONTEXT
+            MovingFunctionScript.CONTEXT,
+            ScriptedMetricAggContexts.InitScript.CONTEXT,
+            ScriptedMetricAggContexts.MapScript.CONTEXT,
+            ScriptedMetricAggContexts.CombineScript.CONTEXT,
+            ScriptedMetricAggContexts.ReduceScript.CONTEXT
         ).collect(Collectors.toMap(c -> c.name, Function.identity()));
     }
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java
new file mode 100644
index 0000000000000..774dc95d39977
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java
@@ -0,0 +1,161 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Scorer;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.search.lookup.LeafSearchLookup;
+import org.elasticsearch.search.lookup.SearchLookup;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+public class ScriptedMetricAggContexts {
+    private abstract static class ParamsAndStateBase {
+        private final Map<String, Object> params;
+        private final Object state;
+
+        ParamsAndStateBase(Map<String, Object> params, Object state) {
+            this.params = params;
+            this.state = state;
+        }
+
+        public Map<String, Object> getParams() {
+            return params;
+        }
+
+        public Object getState() {
+            return state;
+        }
+    }
+
+    public abstract static class InitScript extends ParamsAndStateBase {
+        public InitScript(Map<String, Object> params, Object state) {
+            super(params, state);
+        }
+
+        public abstract void execute();
+
+        public interface Factory {
+            InitScript newInstance(Map<String, Object> params, Object state);
+        }
+
+        public static String[] PARAMETERS = {};
+        public static ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_init", Factory.class);
+    }
+
+    public abstract static class MapScript extends ParamsAndStateBase {
+        private final LeafSearchLookup leafLookup;
+        private Scorer scorer;
+
+        public MapScript(Map<String, Object> params, Object state, SearchLookup lookup, LeafReaderContext leafContext) {
+            super(params, state);
+
+            this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext);
+        }
+
+        // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for
+        // Painless scripts.
+        public Map<String, ScriptDocValues<?>> getDoc() {
+            return leafLookup == null ? null : leafLookup.doc();
+        }
+
+        public void setDocument(int docId) {
+            if (leafLookup != null) {
+                leafLookup.setDocument(docId);
+            }
+        }
+
+        public void setScorer(Scorer scorer) {
+            this.scorer = scorer;
+        }
+
+        // get_score() is named this way so that it's picked up by Painless as '_score'
+        public double get_score() {
+            if (scorer == null) {
+                return 0.0;
+            }
+
+            try {
+                return scorer.score();
+            } catch (IOException e) {
+                throw new ElasticsearchException("Couldn't look up score", e);
+            }
+        }
+
+        public abstract void execute();
+
+        public interface LeafFactory {
+            MapScript newInstance(LeafReaderContext ctx);
+        }
+
+        public interface Factory {
+            LeafFactory newFactory(Map<String, Object> params, Object state, SearchLookup lookup);
+        }
+
+        public static String[] PARAMETERS = new String[] {};
+        public static ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_map", Factory.class);
+    }
+
+    public abstract static class CombineScript extends ParamsAndStateBase {
+        public CombineScript(Map<String, Object> params, Object state) {
+            super(params, state);
+        }
+
+        public abstract Object execute();
+
+        public interface Factory {
+            CombineScript newInstance(Map<String, Object> params, Object state);
+        }
+
+        public static String[] PARAMETERS = {};
+        public static ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_combine", Factory.class);
+    }
+
+    public abstract static class ReduceScript {
+        private final Map<String, Object> params;
+        private final List<Object> states;
+
+        public ReduceScript(Map<String, Object> params, List<Object> states) {
+            this.params = params;
+            this.states = states;
+        }
+
+        public Map<String, Object> getParams() {
+            return params;
+        }
+
+        public List<Object> getStates() {
+            return states;
+        }
+
+        public abstract Object execute();
+
+        public interface Factory {
+            ReduceScript newInstance(Map<String, Object> params, List<Object> states);
+        }
+
+        public static String[] PARAMETERS = {};
+        public static ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class);
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
index 6f9a6fe5d9774..b671f95c446cb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
@@ -21,8 +21,9 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.ScriptedMetricAggContexts;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -89,15 +90,22 @@ public InternalAggregation doReduce(List<InternalAggregation> aggregations, Redu
         InternalScriptedMetric firstAggregation = ((InternalScriptedMetric) aggregations.get(0));
         List<Object> aggregation;
         if (firstAggregation.reduceScript != null && reduceContext.isFinalReduce()) {
-            Map<String, Object> vars = new HashMap<>();
-            vars.put("_aggs", aggregationObjects);
+            Map<String, Object> params = new HashMap<>();
             if (firstAggregation.reduceScript.getParams() != 
null) { - vars.putAll(firstAggregation.reduceScript.getParams()); + params.putAll(firstAggregation.reduceScript.getParams()); } - ExecutableScript.Factory factory = reduceContext.scriptService().compile( - firstAggregation.reduceScript, ExecutableScript.AGGS_CONTEXT); - ExecutableScript script = factory.newInstance(vars); - aggregation = Collections.singletonList(script.run()); + + // Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below). + params.put("_aggs", aggregationObjects); + + ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( + firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, aggregationObjects); + + Object scriptResult = script.execute(); + CollectionUtils.ensureNoSelfReferences(scriptResult); + + aggregation = Collections.singletonList(scriptResult); } else if (reduceContext.isFinalReduce()) { aggregation = Collections.singletonList(aggregationObjects); } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 225398e51b7c0..8b6d834184d73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -26,9 +26,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -202,30 +201,32 @@ protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, Aggrega // Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have // access to them for the scripts it's given precompiled. 
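In the reduce path above, the _aggs entry is still copied into params for backwards compatibility even though the same list now arrives as a context variable on the ReduceScript. A toy subclass, runnable against the server classes introduced in this patch, shows the two access paths pointing at the same data:

    import org.elasticsearch.script.ScriptedMetricAggContexts;

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ReduceShimDemo {
        public static void main(String[] args) {
            List<Object> shardStates = Arrays.asList(1, 2, 3);
            Map<String, Object> params = new HashMap<>();
            params.put("_aggs", shardStates); // legacy spelling, kept only for bwc

            ScriptedMetricAggContexts.ReduceScript script =
                new ScriptedMetricAggContexts.ReduceScript(params, shardStates) {
                    @Override
                    public Object execute() {
                        // getStates() is the new context variable; params.get("_aggs") is the old one.
                        int sum = 0;
                        for (Object state : getStates()) {
                            sum += (Integer) state;
                        }
                        return sum;
                    }
                };
            System.out.println(script.execute()); // 6
        }
    }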
- ExecutableScript.Factory executableInitScript; + ScriptedMetricAggContexts.InitScript.Factory compiledInitScript; Map initScriptParams; if (initScript != null) { - executableInitScript = queryShardContext.getScriptService().compile(initScript, ExecutableScript.AGGS_CONTEXT); + compiledInitScript = queryShardContext.getScriptService().compile(initScript, ScriptedMetricAggContexts.InitScript.CONTEXT); initScriptParams = initScript.getParams(); } else { - executableInitScript = p -> null; + compiledInitScript = (p, a) -> null; initScriptParams = Collections.emptyMap(); } - SearchScript.Factory searchMapScript = queryShardContext.getScriptService().compile(mapScript, SearchScript.AGGS_CONTEXT); + ScriptedMetricAggContexts.MapScript.Factory compiledMapScript = queryShardContext.getScriptService().compile(mapScript, + ScriptedMetricAggContexts.MapScript.CONTEXT); Map mapScriptParams = mapScript.getParams(); - ExecutableScript.Factory executableCombineScript; + ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript; Map combineScriptParams; if (combineScript != null) { - executableCombineScript = queryShardContext.getScriptService().compile(combineScript, ExecutableScript.AGGS_CONTEXT); + compiledCombineScript = queryShardContext.getScriptService().compile(combineScript, + ScriptedMetricAggContexts.CombineScript.CONTEXT); combineScriptParams = combineScript.getParams(); } else { - executableCombineScript = p -> null; + compiledCombineScript = (p, a) -> null; combineScriptParams = Collections.emptyMap(); } - return new ScriptedMetricAggregatorFactory(name, searchMapScript, mapScriptParams, executableInitScript, initScriptParams, - executableCombineScript, combineScriptParams, reduceScript, + return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript, + initScriptParams, compiledCombineScript, combineScriptParams, reduceScript, params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index 04ef595690a33..194a41b897fd2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -38,17 +38,17 @@ public class ScriptedMetricAggregator extends MetricsAggregator { - private final SearchScript.LeafFactory mapScript; - private final ExecutableScript combineScript; + private final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript; + private final ScriptedMetricAggContexts.CombineScript combineScript; private final Script reduceScript; - private Map params; + private Object aggState; - protected 
ScriptedMetricAggregator(String name, SearchScript.LeafFactory mapScript, ExecutableScript combineScript, - Script reduceScript, - Map params, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) - throws IOException { + protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScript.LeafFactory mapScript, ScriptedMetricAggContexts.CombineScript combineScript, + Script reduceScript, Object aggState, SearchContext context, Aggregator parent, + List pipelineAggregators, Map metaData) + throws IOException { super(name, context, parent, pipelineAggregators, metaData); - this.params = params; + this.aggState = aggState; this.mapScript = mapScript; this.combineScript = combineScript; this.reduceScript = reduceScript; @@ -62,13 +62,20 @@ public boolean needsScores() { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - final SearchScript leafMapScript = mapScript.newInstance(ctx); + final ScriptedMetricAggContexts.MapScript leafMapScript = mapScript.newInstance(ctx); return new LeafBucketCollectorBase(sub, leafMapScript) { + @Override + public void setScorer(Scorer scorer) throws IOException { + leafMapScript.setScorer(scorer); + } + @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0 : bucket; + leafMapScript.setDocument(doc); - leafMapScript.run(); + leafMapScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState); } }; } @@ -77,10 +84,10 @@ public void collect(int doc, long bucket) throws IOException { public InternalAggregation buildAggregation(long owningBucketOrdinal) { Object aggregation; if (combineScript != null) { - aggregation = combineScript.run(); + aggregation = combineScript.execute(); CollectionUtils.ensureNoSelfReferences(aggregation); } else { - aggregation = params.get("_agg"); + aggregation = aggState; } return new InternalScriptedMetric(name, aggregation, reduceScript, pipelineAggregators(), metaData()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index 0bc6a614e541f..69e4c00cf7206 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -19,9 +19,9 @@ package org.elasticsearch.search.aggregations.metrics.scripted; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -38,20 +38,21 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory { - private final SearchScript.Factory mapScript; + private final ScriptedMetricAggContexts.MapScript.Factory mapScript; private final Map mapScriptParams; - private final ExecutableScript.Factory combineScript; + private final ScriptedMetricAggContexts.CombineScript.Factory combineScript; private final Map combineScriptParams; private final Script reduceScript; private final Map aggParams; private 
final SearchLookup lookup; - private final ExecutableScript.Factory initScript; + private final ScriptedMetricAggContexts.InitScript.Factory initScript; private final Map initScriptParams; - public ScriptedMetricAggregatorFactory(String name, SearchScript.Factory mapScript, Map mapScriptParams, - ExecutableScript.Factory initScript, Map initScriptParams, - ExecutableScript.Factory combineScript, Map combineScriptParams, - Script reduceScript, Map aggParams, + public ScriptedMetricAggregatorFactory(String name, + ScriptedMetricAggContexts.MapScript.Factory mapScript, Map mapScriptParams, + ScriptedMetricAggContexts.InitScript.Factory initScript, Map initScriptParams, + ScriptedMetricAggContexts.CombineScript.Factory combineScript, + Map combineScriptParams, Script reduceScript, Map aggParams, SearchLookup lookup, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, Map metaData) throws IOException { super(name, context, parent, subFactories, metaData); @@ -78,20 +79,29 @@ public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBu } else { aggParams = new HashMap<>(); } + + // Add _agg to params map for backwards compatibility (redundant with context variables on the scripts created below). + // When this is removed, aggState (as passed to ScriptedMetricAggregator) can be changed to Map, since + // it won't be possible to completely replace it with another type as is possible when it's an entry in params. if (aggParams.containsKey("_agg") == false) { aggParams.put("_agg", new HashMap()); } + Object aggState = aggParams.get("_agg"); - final ExecutableScript initScript = this.initScript.newInstance(mergeParams(aggParams, initScriptParams)); - final SearchScript.LeafFactory mapScript = this.mapScript.newFactory(mergeParams(aggParams, mapScriptParams), lookup); - final ExecutableScript combineScript = this.combineScript.newInstance(mergeParams(aggParams, combineScriptParams)); + final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance( + mergeParams(aggParams, initScriptParams), aggState); + final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript = this.mapScript.newFactory( + mergeParams(aggParams, mapScriptParams), aggState, lookup); + final ScriptedMetricAggContexts.CombineScript combineScript = this.combineScript.newInstance( + mergeParams(aggParams, combineScriptParams), aggState); final Script reduceScript = deepCopyScript(this.reduceScript, context); if (initScript != null) { - initScript.run(); + initScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState); } return new ScriptedMetricAggregator(name, mapScript, - combineScript, reduceScript, aggParams, context, parent, + combineScript, reduceScript, aggState, context, parent, pipelineAggregators, metaData); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponseTests.java new file mode 100644 index 0000000000000..1b307e0713c3b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponseTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
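createInternal above stores a fresh map under aggParams.get("_agg") and hands that same object to the scripts as aggState, so during the deprecation window a script can mutate the state through either name. A small sketch of that aliasing, assuming the ScriptedMetricAggContexts class from this patch is on the classpath:

    import org.elasticsearch.script.ScriptedMetricAggContexts;

    import java.util.HashMap;
    import java.util.Map;

    public class AggStateAliasDemo {
        public static void main(String[] args) {
            Map<String, Object> aggParams = new HashMap<>();
            aggParams.put("_agg", new HashMap<String, Object>());
            Object aggState = aggParams.get("_agg"); // one object, two names

            ScriptedMetricAggContexts.InitScript init =
                new ScriptedMetricAggContexts.InitScript(aggParams, aggState) {
                    @Override
                    @SuppressWarnings("unchecked")
                    public void execute() {
                        // Writing through the new-style state...
                        ((Map<String, Object>) getState()).put("transactions", 0);
                    }
                };
            init.execute();
            // ...is visible through the old-style params entry.
            System.out.println(aggParams.get("_agg")); // {transactions=0}
        }
    }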
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.settings; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.function.Predicate; + +public class ClusterGetSettingsResponseTests extends AbstractXContentTestCase { + + @Override + protected ClusterGetSettingsResponse doParseInstance(XContentParser parser) throws IOException { + return ClusterGetSettingsResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected ClusterGetSettingsResponse createTestInstance() { + Settings persistentSettings = ClusterUpdateSettingsResponseTests.randomClusterSettings(0, 2); + Settings transientSettings = ClusterUpdateSettingsResponseTests.randomClusterSettings(0, 2); + Settings defaultSettings = randomBoolean() ? + ClusterUpdateSettingsResponseTests.randomClusterSettings(0, 2): Settings.EMPTY; + return new ClusterGetSettingsResponse(persistentSettings, transientSettings, defaultSettings); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return p -> + p.startsWith(ClusterGetSettingsResponse.TRANSIENT_FIELD) || + p.startsWith(ClusterGetSettingsResponse.PERSISTENT_FIELD) || + p.startsWith(ClusterGetSettingsResponse.DEFAULTS_FIELD); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java new file mode 100644 index 0000000000000..404db74a46e12 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
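With renderResponse now delegating to ClusterGetSettingsResponse, the REST handler and this test share a single XContent shape. A round-trip sketch under the same assumptions the test makes (the three-settings constructor and toXContent; the sample setting key is purely illustrative):

    import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse;
    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.settings.Settings;

    public class ClusterGetSettingsDemo {
        public static void main(String[] args) {
            Settings persistent = Settings.builder()
                .put("cluster.routing.allocation.enable", "all")
                .build();
            ClusterGetSettingsResponse response =
                new ClusterGetSettingsResponse(persistent, Settings.EMPTY, Settings.EMPTY);
            // Should render the persistent/transient/defaults sections the test filters on.
            System.out.println(Strings.toString(response));
        }
    }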
+ */ + +package org.elasticsearch.action.admin.indices.analyze; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Predicate; + +public class AnalyzeResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return s -> s.contains("tokens."); + } + + @Override + protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOException { + return AnalyzeResponse.fromXContent(parser); + } + + @Override + protected AnalyzeResponse createBlankInstance() { + return new AnalyzeResponse(); + } + + @Override + protected AnalyzeResponse createTestInstance() { + int tokenCount = randomIntBetween(1, 30); + AnalyzeResponse.AnalyzeToken[] tokens = new AnalyzeResponse.AnalyzeToken[tokenCount]; + for (int i = 0; i < tokenCount; i++) { + tokens[i] = randomToken(); + } + DetailAnalyzeResponse dar = null; + if (randomBoolean()) { + dar = new DetailAnalyzeResponse(); + if (randomBoolean()) { + dar.charfilters(new DetailAnalyzeResponse.CharFilteredText[]{ + new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"}) + }); + } + dar.tokenizer(new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens)); + if (randomBoolean()) { + dar.tokenfilters(new DetailAnalyzeResponse.AnalyzeTokenList[]{ + new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens), + new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens) + }); + } + return new AnalyzeResponse(null, dar); + } + return new AnalyzeResponse(Arrays.asList(tokens), null); + } + + private AnalyzeResponse.AnalyzeToken randomToken() { + String token = randomAlphaOfLengthBetween(1, 20); + int position = randomIntBetween(0, 1000); + int startOffset = randomIntBetween(0, 1000); + int endOffset = randomIntBetween(0, 1000); + int posLength = randomIntBetween(1, 5); + String type = randomAlphaOfLengthBetween(1, 20); + Map extras = new HashMap<>(); + if (randomBoolean()) { + int entryCount = randomInt(6); + for (int i = 0; i < entryCount; i++) { + switch (randomInt(6)) { + case 0: + case 1: + case 2: + case 3: + String key = randomAlphaOfLength(5); + String value = randomAlphaOfLength(10); + extras.put(key, value); + break; + case 4: + String objkey = randomAlphaOfLength(5); + Map obj = new HashMap<>(); + obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10)); + extras.put(objkey, obj); + break; + case 5: + String listkey = randomAlphaOfLength(5); + List list = new ArrayList<>(); + list.add(randomAlphaOfLength(4)); + list.add(randomAlphaOfLength(6)); + extras.put(listkey, list); + break; + } + } + } + return new AnalyzeResponse.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java new file mode 100644 index 0000000000000..731397e043918 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java @@ -0,0 +1,149 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
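The randomized round trip above leans on AnalyzeResponse.fromXContent, which can equally parse a raw _analyze response body. A minimal sketch, assuming an empty NamedXContentRegistry suffices here as it does in the test case (the JSON is a shortened illustration):

    import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.XContentType;

    public class AnalyzeParseDemo {
        public static void main(String[] args) throws Exception {
            String json = "{\"tokens\":[{\"token\":\"quick\",\"start_offset\":0,"
                + "\"end_offset\":5,\"type\":\"<ALPHANUM>\",\"position\":0}]}";
            try (XContentParser parser = XContentType.JSON.xContent().createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
                AnalyzeResponse response = AnalyzeResponse.fromXContent(parser);
                System.out.println(response.getTokens().get(0).getTerm()); // quick
            }
        }
    }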
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.get; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.index.Index; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.transport.CapturingTransport; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +public class GetIndexActionTests extends ESSingleNodeTestCase { + + private TransportService transportService; + private ClusterService clusterService; + private IndicesService indicesService; + private ThreadPool threadPool; + private SettingsFilter settingsFilter; + private final String indexName = "test_index"; + + private TestTransportGetIndexAction getIndexAction; + + @Before + public void setUp() throws Exception { + super.setUp(); + + settingsFilter = new SettingsModule(Settings.EMPTY, Collections.emptyList(), Collections.emptyList()).getSettingsFilter(); + threadPool = new TestThreadPool("GetIndexActionTests"); + clusterService = getInstanceFromNode(ClusterService.class); + indicesService = getInstanceFromNode(IndicesService.class); + CapturingTransport capturingTransport = new CapturingTransport(); + transportService = new TransportService(clusterService.getSettings(), capturingTransport, threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> clusterService.localNode(), null, Collections.emptySet()); + transportService.start(); + transportService.acceptIncomingRequests(); + getIndexAction = new GetIndexActionTests.TestTransportGetIndexAction(); + } + + @After + public void tearDown() throws Exception { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + super.tearDown(); + } + + public void testIncludeDefaults() { + GetIndexRequest defaultsRequest = new GetIndexRequest().indices(indexName).includeDefaults(true); + getIndexAction.execute(null, defaultsRequest, ActionListener.wrap( + defaultsResponse -> { + assertNotNull( + "index.refresh_interval should be set as we 
are including defaults", + defaultsResponse.getSetting(indexName, "index.refresh_interval") + ); + }, exception -> { + throw new AssertionError(exception); + }) + ); + } + + public void testDoNotIncludeDefaults() { + GetIndexRequest noDefaultsRequest = new GetIndexRequest().indices(indexName); + getIndexAction.execute(null, noDefaultsRequest, ActionListener.wrap( + noDefaultsResponse -> { + assertNull( + "index.refresh_interval should be null as it was never set", + noDefaultsResponse.getSetting(indexName, "index.refresh_interval") + ); + }, exception -> { + throw new AssertionError(exception); + }) + ); + } + + protected boolean enableWarningsCheck() { + // single node test case has some deprecated settings and we return them + return false; + } + + class TestTransportGetIndexAction extends TransportGetIndexAction { + + TestTransportGetIndexAction() { + super(Settings.EMPTY, GetIndexActionTests.this.transportService, GetIndexActionTests.this.clusterService, + GetIndexActionTests.this.threadPool, settingsFilter, new ActionFilters(Collections.emptySet()), + new GetIndexActionTests.Resolver(Settings.EMPTY), indicesService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + } + + @Override + protected void doMasterOperation(GetIndexRequest request, String[] concreteIndices, ClusterState state, + ActionListener listener) { + ClusterState stateWithIndex = ClusterStateCreationUtils.state(indexName, 1, 1); + super.doMasterOperation(request, concreteIndices, stateWithIndex, listener); + } + } + + static class Resolver extends IndexNameExpressionResolver { + Resolver(Settings settings) { + super(settings); + } + + @Override + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { + return request.indices(); + } + + @Override + public Index[] concreteIndices(ClusterState state, IndicesRequest request) { + Index[] out = new Index[request.indices().length]; + for (int x = 0; x < out.length; x++) { + out[x] = new Index(request.indices()[x], "_na_"); + } + return out; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java new file mode 100644 index 0000000000000..3991442fd5b87 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexResponseTests.java @@ -0,0 +1,194 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
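On the client side the REST change above is just a request flag: include_defaults is forwarded straight into GetIndexRequest.includeDefaults. A usage sketch, with the client wiring omitted and the index name hypothetical:

    import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
    import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
    import org.elasticsearch.client.Client;

    public class IncludeDefaultsDemo {
        static void printRefreshInterval(Client client) { // client construction omitted
            GetIndexRequest request = new GetIndexRequest()
                .indices("test_index")
                .includeDefaults(true); // ask the master to fill in default settings
            GetIndexResponse response = client.admin().indices().getIndex(request).actionGet();
            // Without includeDefaults(true) this is null for an index that never set it.
            System.out.println(response.getSetting("test_index", "index.refresh_interval"));
        }
    }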
+ */ + +package org.elasticsearch.action.admin.indices.get; + +import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponseTests; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponseTests; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.RandomCreateIndexGenerator; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.junit.Assert; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.function.Predicate; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; + +public class GetIndexResponseTests extends AbstractStreamableXContentTestCase { + + /** + * The following byte response was generated from the v6.3.0 tag + */ + private static final String TEST_6_3_0_RESPONSE_BYTES = + "AQhteV9pbmRleAEIbXlfaW5kZXgBA2RvYwNkb2OePID6KURGTACqVkrLTM1JiTdUsqpWKqksSFWyUiouKcrMS1eqrQUAAAD//" + + "wMAAAABCG15X2luZGV4AgZhbGlhczEAAQJyMQECcjEGYWxpYXMyAX8jNXYiREZMAKpWKkktylWyqlaqTE0sUrIyMjA0q60FAAAA//" + + "8DAAAAAQhteV9pbmRleAIYaW5kZXgubnVtYmVyX29mX3JlcGxpY2FzAAExFmluZGV4Lm51bWJlcl9vZl9zaGFyZHMAATI="; + private static final GetIndexResponse TEST_6_3_0_RESPONSE_INSTANCE = getExpectedTest630Response(); + + @Override + protected GetIndexResponse doParseInstance(XContentParser parser) throws IOException { + return GetIndexResponse.fromXContent(parser); + } + + @Override + protected GetIndexResponse createBlankInstance() { + return new GetIndexResponse(); + } + + @Override + protected GetIndexResponse createTestInstance() { + String[] indices = generateRandomStringArray(5, 5, false, false); + ImmutableOpenMap.Builder> mappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder defaultSettings = ImmutableOpenMap.builder(); + IndexScopedSettings indexScopedSettings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS; + boolean includeDefaults = randomBoolean(); + for (String index: indices) { + mappings.put(index, GetMappingsResponseTests.createMappingsForIndex()); + + List aliasMetaDataList = new ArrayList<>(); + int aliasesNum = randomIntBetween(0, 3); + for (int i=0; i getRandomFieldsExcludeFilter() { + //we do not want to add new fields at the root (index-level), or inside the blocks + return + f -> f.equals("") || f.contains(".settings") || f.contains(".defaults") || f.contains(".mappings") || + f.contains(".aliases"); + } + + private static ImmutableOpenMap> getTestAliases(String indexName) { + ImmutableOpenMap.Builder> aliases = ImmutableOpenMap.builder(); + 
List<AliasMetaData> indexAliases = new ArrayList<>();
+        indexAliases.add(new AliasMetaData.Builder("alias1").routing("r1").build());
+        indexAliases.add(new AliasMetaData.Builder("alias2").filter("{\"term\": {\"year\": 2016}}").build());
+        aliases.put(indexName, Collections.unmodifiableList(indexAliases));
+        return aliases.build();
+    }
+
+    private static ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> getTestMappings(String indexName) {
+        ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> mappings = ImmutableOpenMap.builder();
+        ImmutableOpenMap.Builder<String, MappingMetaData> indexMappings = ImmutableOpenMap.builder();
+        try {
+            indexMappings.put(
+                "doc",
+                new MappingMetaData("doc",
+                    Collections.singletonMap("field_1", Collections.singletonMap("type", "string"))
+                )
+            );
+        } catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+        mappings.put(indexName, indexMappings.build());
+        return mappings.build();
+    }
+
+    private static ImmutableOpenMap<String, Settings> getTestSettings(String indexName) {
+        ImmutableOpenMap.Builder<String, Settings> settings = ImmutableOpenMap.builder();
+        Settings.Builder indexSettings = Settings.builder();
+        indexSettings.put(SETTING_NUMBER_OF_SHARDS, 2);
+        indexSettings.put(SETTING_NUMBER_OF_REPLICAS, 1);
+        settings.put(indexName, indexSettings.build());
+        return settings.build();
+    }
+
+    private static GetIndexResponse getExpectedTest630Response() {
+        // The only difference between this snippet and the one used to generate TEST_6_3_0_RESPONSE_BYTES is the
+        // constructor for GetIndexResponse, which also takes defaultSettings now.
+        String indexName = "my_index";
+        String[] indices = { indexName };
+        return
+            new GetIndexResponse(
+                indices, getTestMappings(indexName), getTestAliases(indexName), getTestSettings(indexName),
+                ImmutableOpenMap.of()
+            );
+    }
+
+    private static GetIndexResponse getResponseWithDefaultSettings() {
+        String indexName = "my_index";
+        String[] indices = { indexName };
+        ImmutableOpenMap.Builder<String, Settings> defaultSettings = ImmutableOpenMap.builder();
+        Settings.Builder indexDefaultSettings = Settings.builder();
+        indexDefaultSettings.put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1s");
+        defaultSettings.put(indexName, indexDefaultSettings.build());
+        return
+            new GetIndexResponse(
+                indices, getTestMappings(indexName), getTestAliases(indexName), getTestSettings(indexName),
+                defaultSettings.build()
+            );
+    }
+
+    public void testCanDecode630Response() throws IOException {
+        StreamInput si = StreamInput.wrap(Base64.getDecoder().decode(TEST_6_3_0_RESPONSE_BYTES));
+        si.setVersion(Version.V_6_3_0);
+        GetIndexResponse response = new GetIndexResponse();
+        response.readFrom(si);
+
+        Assert.assertEquals(TEST_6_3_0_RESPONSE_INSTANCE, response);
+    }
+
+    public void testCanOutput630Response() throws IOException {
+        GetIndexResponse responseWithExtraFields = getResponseWithDefaultSettings();
+        BytesStreamOutput bso = new BytesStreamOutput();
+        bso.setVersion(Version.V_6_3_0);
+        responseWithExtraFields.writeTo(bso);
+        String base64OfResponse = Base64.getEncoder().encodeToString(BytesReference.toBytes(bso.bytes()));
+
+        Assert.assertEquals(TEST_6_3_0_RESPONSE_BYTES, base64OfResponse);
+    }
+
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java
index 0fa5ca075fa8d..91c7841868393 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java
@@ -80,8 +80,7 
@@ protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throw return mutate(instance); } - @Override - protected GetMappingsResponse createTestInstance() { + public static ImmutableOpenMap createMappingsForIndex() { // rarely have no types int typeCount = rarely() ? 0 : scaledRandomIntBetween(1, 3); List typeMappings = new ArrayList<>(typeCount); @@ -104,8 +103,13 @@ protected GetMappingsResponse createTestInstance() { } ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(); typeMappings.forEach(mmd -> typeBuilder.put(mmd.type(), mmd)); + return typeBuilder.build(); + } + + @Override + protected GetMappingsResponse createTestInstance() { ImmutableOpenMap.Builder> indexBuilder = ImmutableOpenMap.builder(); - indexBuilder.put("index-" + randomAlphaOfLength(5), typeBuilder.build()); + indexBuilder.put("index-" + randomAlphaOfLength(5), createMappingsForIndex()); GetMappingsResponse resp = new GetMappingsResponse(indexBuilder.build()); logger.debug("--> created: {}", resp); return resp; diff --git a/server/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/server/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java index 56182fb90febe..31a84423db97a 100644 --- a/server/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java +++ b/server/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java @@ -19,20 +19,12 @@ package org.elasticsearch.common.breaker; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.indices.breaker.BreakerSettings; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -50,21 +42,18 @@ public void testThreadedUpdatesToBreaker() throws Exception { final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue((BYTES_PER_THREAD * NUM_THREADS) - 1), 1.0, logger); for (int i = 0; i < NUM_THREADS; i++) { - threads[i] = new Thread(new Runnable() { - @Override - public void run() { - for (int j = 0; j < BYTES_PER_THREAD; j++) { - try { - breaker.addEstimateBytesAndMaybeBreak(1L, "test"); - } catch (CircuitBreakingException e) { - if (tripped.get()) { - assertThat("tripped too many times", true, equalTo(false)); - } else { - assertThat(tripped.compareAndSet(false, true), equalTo(true)); - } - } catch (Exception e) { - lastException.set(e); + threads[i] = new Thread(() -> { + for (int j = 0; j < BYTES_PER_THREAD; j++) { + try { + breaker.addEstimateBytesAndMaybeBreak(1L, "test"); + } catch (CircuitBreakingException e) { + if (tripped.get()) { + assertThat("tripped too many times", true, equalTo(false)); + } else { + assertThat(tripped.compareAndSet(false, true), equalTo(true)); } + } catch (Exception e) { + lastException.set(e); } } }); @@ -81,134 +70,6 @@ public void run() { assertThat("breaker was tripped at least once", breaker.getTrippedCount(), greaterThanOrEqualTo(1L)); } - public void 
testThreadedUpdatesToChildBreaker() throws Exception { - final int NUM_THREADS = scaledRandomIntBetween(3, 15); - final int BYTES_PER_THREAD = scaledRandomIntBetween(500, 4500); - final Thread[] threads = new Thread[NUM_THREADS]; - final AtomicBoolean tripped = new AtomicBoolean(false); - final AtomicReference lastException = new AtomicReference<>(null); - - final AtomicReference breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { - - @Override - public CircuitBreaker getBreaker(String name) { - return breakerRef.get(); - } - - @Override - public void checkParentLimit(String label) throws CircuitBreakingException { - // never trip - } - }; - final BreakerSettings settings = new BreakerSettings(CircuitBreaker.REQUEST, (BYTES_PER_THREAD * NUM_THREADS) - 1, 1.0); - final ChildMemoryCircuitBreaker breaker = new ChildMemoryCircuitBreaker(settings, logger, - (HierarchyCircuitBreakerService)service, CircuitBreaker.REQUEST); - breakerRef.set(breaker); - - for (int i = 0; i < NUM_THREADS; i++) { - threads[i] = new Thread(new Runnable() { - @Override - public void run() { - for (int j = 0; j < BYTES_PER_THREAD; j++) { - try { - breaker.addEstimateBytesAndMaybeBreak(1L, "test"); - } catch (CircuitBreakingException e) { - if (tripped.get()) { - assertThat("tripped too many times", true, equalTo(false)); - } else { - assertThat(tripped.compareAndSet(false, true), equalTo(true)); - } - } catch (Exception e) { - lastException.set(e); - } - } - } - }); - - threads[i].start(); - } - - for (Thread t : threads) { - t.join(); - } - - assertThat("no other exceptions were thrown", lastException.get(), equalTo(null)); - assertThat("breaker was tripped", tripped.get(), equalTo(true)); - assertThat("breaker was tripped at least once", breaker.getTrippedCount(), greaterThanOrEqualTo(1L)); - } - - public void testThreadedUpdatesToChildBreakerWithParentLimit() throws Exception { - final int NUM_THREADS = scaledRandomIntBetween(3, 15); - final int BYTES_PER_THREAD = scaledRandomIntBetween(500, 4500); - final int parentLimit = (BYTES_PER_THREAD * NUM_THREADS) - 2; - final int childLimit = parentLimit + 10; - final Thread[] threads = new Thread[NUM_THREADS]; - final AtomicInteger tripped = new AtomicInteger(0); - final AtomicReference lastException = new AtomicReference<>(null); - - final AtomicInteger parentTripped = new AtomicInteger(0); - final AtomicReference breakerRef = new AtomicReference<>(null); - final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { - - @Override - public CircuitBreaker getBreaker(String name) { - return breakerRef.get(); - } - - @Override - public void checkParentLimit(String label) throws CircuitBreakingException { - // Parent will trip right before regular breaker would trip - if (getBreaker(CircuitBreaker.REQUEST).getUsed() > parentLimit) { - parentTripped.incrementAndGet(); - logger.info("--> parent tripped"); - throw new CircuitBreakingException("parent tripped"); - } - } - }; - final BreakerSettings settings = new BreakerSettings(CircuitBreaker.REQUEST, childLimit, 1.0); - final ChildMemoryCircuitBreaker breaker = new ChildMemoryCircuitBreaker(settings, logger, - (HierarchyCircuitBreakerService)service, CircuitBreaker.REQUEST); - breakerRef.set(breaker); - - for (int i = 0; i < NUM_THREADS; i++) { - threads[i] = 
new Thread(new Runnable() { - @Override - public void run() { - for (int j = 0; j < BYTES_PER_THREAD; j++) { - try { - breaker.addEstimateBytesAndMaybeBreak(1L, "test"); - } catch (CircuitBreakingException e) { - tripped.incrementAndGet(); - } catch (Exception e) { - lastException.set(e); - } - } - } - }); - } - - logger.info("--> NUM_THREADS: [{}], BYTES_PER_THREAD: [{}], TOTAL_BYTES: [{}], PARENT_LIMIT: [{}], CHILD_LIMIT: [{}]", - NUM_THREADS, BYTES_PER_THREAD, (BYTES_PER_THREAD * NUM_THREADS), parentLimit, childLimit); - - logger.info("--> starting threads..."); - for (Thread t : threads) { - t.start(); - } - - for (Thread t : threads) { - t.join(); - } - - logger.info("--> child breaker: used: {}, limit: {}", breaker.getUsed(), breaker.getLimit()); - logger.info("--> parent tripped: {}, total trip count: {} (expecting 1-2 for each)", parentTripped.get(), tripped.get()); - assertThat("no other exceptions were thrown", lastException.get(), equalTo(null)); - assertThat("breaker should be reset back to the parent limit after parent breaker trips", - breaker.getUsed(), greaterThanOrEqualTo((long)parentLimit - NUM_THREADS)); - assertThat("parent breaker was tripped at least once", parentTripped.get(), greaterThanOrEqualTo(1)); - assertThat("total breaker was tripped at least once", tripped.get(), greaterThanOrEqualTo(1)); - } - public void testConstantFactor() throws Exception { final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue(15), 1.6, logger); String field = "myfield"; @@ -243,40 +104,4 @@ public void testConstantFactor() throws Exception { assertThat(cbe.getMessage().contains("field [" + field + "]"), equalTo(true)); } } - - /** - * Test that a breaker correctly redistributes to a different breaker, in - * this case, the request breaker borrows space from the fielddata breaker - */ - public void testBorrowingSiblingBreakerMemory() throws Exception { - Settings clusterSettings = Settings.builder() - .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "200mb") - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "150mb") - .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "150mb") - .build(); - try (CircuitBreakerService service = new HierarchyCircuitBreakerService(clusterSettings, - new ClusterSettings(clusterSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { - CircuitBreaker requestCircuitBreaker = service.getBreaker(MemoryCircuitBreaker.REQUEST); - CircuitBreaker fieldDataCircuitBreaker = service.getBreaker(MemoryCircuitBreaker.FIELDDATA); - - assertEquals(new ByteSizeValue(200, ByteSizeUnit.MB).getBytes(), - service.stats().getStats(MemoryCircuitBreaker.PARENT).getLimit()); - assertEquals(new ByteSizeValue(150, ByteSizeUnit.MB).getBytes(), requestCircuitBreaker.getLimit()); - assertEquals(new ByteSizeValue(150, ByteSizeUnit.MB).getBytes(), fieldDataCircuitBreaker.getLimit()); - - double fieldDataUsedBytes = fieldDataCircuitBreaker - .addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should not break"); - assertEquals(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), fieldDataUsedBytes, 0.0); - double requestUsedBytes = requestCircuitBreaker.addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), - "should not break"); - assertEquals(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), requestUsedBytes, 0.0); - requestUsedBytes = requestCircuitBreaker.addEstimateBytesAndMaybeBreak(new 
ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), - "should not break"); - assertEquals(new ByteSizeValue(100, ByteSizeUnit.MB).getBytes(), requestUsedBytes, 0.0); - CircuitBreakingException exception = expectThrows(CircuitBreakingException.class, () -> requestCircuitBreaker - .addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should break")); - assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [should break] would be")); - assertThat(exception.getMessage(), containsString("which is larger than the limit of [209715200/200mb]")); - } - } } diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index aeaca328ceb7b..39d622d52f698 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -450,6 +450,26 @@ public void testFromWKT() throws IOException { assertEquals(expectedJson, GeoExecType.MEMORY, parsed.type()); } + public void testHonorsCoercion() throws IOException { + assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); + String query = "{\n" + + " \"geo_bounding_box\": {\n" + + " \"validation_method\": \"COERCE\",\n" + + " \"" + GEO_POINT_FIELD_NAME + "\": {\n" + + " \"top_left\": {\n" + + " \"lat\": -15.5,\n" + + " \"lon\": 176.5\n" + + " },\n" + + " \"bottom_right\": {\n" + + " \"lat\": -19.6,\n" + + " \"lon\": 181\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + assertGeoBoundingBoxQuery(query); + } + @Override public void testMustRewrite() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java new file mode 100644 index 0000000000000..a03739b2d9a94 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -0,0 +1,202 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.indices.breaker; + + +import org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.breaker.MemoryCircuitBreaker; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public class HierarchyCircuitBreakerServiceTests extends ESTestCase { + public void testThreadedUpdatesToChildBreaker() throws Exception { + final int NUM_THREADS = scaledRandomIntBetween(3, 15); + final int BYTES_PER_THREAD = scaledRandomIntBetween(500, 4500); + final Thread[] threads = new Thread[NUM_THREADS]; + final AtomicBoolean tripped = new AtomicBoolean(false); + final AtomicReference lastException = new AtomicReference<>(null); + + final AtomicReference breakerRef = new AtomicReference<>(null); + final CircuitBreakerService service = new HierarchyCircuitBreakerService(Settings.EMPTY, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { + + @Override + public CircuitBreaker getBreaker(String name) { + return breakerRef.get(); + } + + @Override + public void checkParentLimit(String label) throws CircuitBreakingException { + // never trip + } + }; + final BreakerSettings settings = new BreakerSettings(CircuitBreaker.REQUEST, (BYTES_PER_THREAD * NUM_THREADS) - 1, 1.0); + final ChildMemoryCircuitBreaker breaker = new ChildMemoryCircuitBreaker(settings, logger, + (HierarchyCircuitBreakerService)service, CircuitBreaker.REQUEST); + breakerRef.set(breaker); + + for (int i = 0; i < NUM_THREADS; i++) { + threads[i] = new Thread(() -> { + for (int j = 0; j < BYTES_PER_THREAD; j++) { + try { + breaker.addEstimateBytesAndMaybeBreak(1L, "test"); + } catch (CircuitBreakingException e) { + if (tripped.get()) { + assertThat("tripped too many times", true, equalTo(false)); + } else { + assertThat(tripped.compareAndSet(false, true), equalTo(true)); + } + } catch (Exception e) { + lastException.set(e); + } + } + }); + + threads[i].start(); + } + + for (Thread t : threads) { + t.join(); + } + + assertThat("no other exceptions were thrown", lastException.get(), equalTo(null)); + assertThat("breaker was tripped", tripped.get(), equalTo(true)); + assertThat("breaker was tripped at least once", breaker.getTrippedCount(), greaterThanOrEqualTo(1L)); + } + + public void testThreadedUpdatesToChildBreakerWithParentLimit() throws Exception { + final int NUM_THREADS = scaledRandomIntBetween(3, 15); + final int BYTES_PER_THREAD = scaledRandomIntBetween(500, 4500); + final int parentLimit = (BYTES_PER_THREAD * NUM_THREADS) - 2; + final int childLimit = parentLimit + 10; + final Thread[] threads = new Thread[NUM_THREADS]; + final AtomicInteger tripped = new AtomicInteger(0); + final AtomicReference lastException = new AtomicReference<>(null); + + final AtomicInteger parentTripped = new AtomicInteger(0); + final AtomicReference breakerRef = new AtomicReference<>(null); + final CircuitBreakerService service = new 
HierarchyCircuitBreakerService(Settings.EMPTY, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) { + + @Override + public CircuitBreaker getBreaker(String name) { + return breakerRef.get(); + } + + @Override + public void checkParentLimit(String label) throws CircuitBreakingException { + // Parent will trip right before regular breaker would trip + if (getBreaker(CircuitBreaker.REQUEST).getUsed() > parentLimit) { + parentTripped.incrementAndGet(); + logger.info("--> parent tripped"); + throw new CircuitBreakingException("parent tripped"); + } + } + }; + final BreakerSettings settings = new BreakerSettings(CircuitBreaker.REQUEST, childLimit, 1.0); + final ChildMemoryCircuitBreaker breaker = new ChildMemoryCircuitBreaker(settings, logger, + (HierarchyCircuitBreakerService)service, CircuitBreaker.REQUEST); + breakerRef.set(breaker); + + for (int i = 0; i < NUM_THREADS; i++) { + threads[i] = new Thread(() -> { + for (int j = 0; j < BYTES_PER_THREAD; j++) { + try { + breaker.addEstimateBytesAndMaybeBreak(1L, "test"); + } catch (CircuitBreakingException e) { + tripped.incrementAndGet(); + } catch (Exception e) { + lastException.set(e); + } + } + }); + } + + logger.info("--> NUM_THREADS: [{}], BYTES_PER_THREAD: [{}], TOTAL_BYTES: [{}], PARENT_LIMIT: [{}], CHILD_LIMIT: [{}]", + NUM_THREADS, BYTES_PER_THREAD, (BYTES_PER_THREAD * NUM_THREADS), parentLimit, childLimit); + + logger.info("--> starting threads..."); + for (Thread t : threads) { + t.start(); + } + + for (Thread t : threads) { + t.join(); + } + + logger.info("--> child breaker: used: {}, limit: {}", breaker.getUsed(), breaker.getLimit()); + logger.info("--> parent tripped: {}, total trip count: {} (expecting 1-2 for each)", parentTripped.get(), tripped.get()); + assertThat("no other exceptions were thrown", lastException.get(), equalTo(null)); + assertThat("breaker should be reset back to the parent limit after parent breaker trips", + breaker.getUsed(), greaterThanOrEqualTo((long)parentLimit - NUM_THREADS)); + assertThat("parent breaker was tripped at least once", parentTripped.get(), greaterThanOrEqualTo(1)); + assertThat("total breaker was tripped at least once", tripped.get(), greaterThanOrEqualTo(1)); + } + + + /** + * Test that a breaker correctly redistributes to a different breaker, in + * this case, the request breaker borrows space from the fielddata breaker + */ + public void testBorrowingSiblingBreakerMemory() throws Exception { + Settings clusterSettings = Settings.builder() + .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "200mb") + .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "150mb") + .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "150mb") + .build(); + try (CircuitBreakerService service = new HierarchyCircuitBreakerService(clusterSettings, + new ClusterSettings(clusterSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS))) { + CircuitBreaker requestCircuitBreaker = service.getBreaker(MemoryCircuitBreaker.REQUEST); + CircuitBreaker fieldDataCircuitBreaker = service.getBreaker(MemoryCircuitBreaker.FIELDDATA); + + assertEquals(new ByteSizeValue(200, ByteSizeUnit.MB).getBytes(), + service.stats().getStats(MemoryCircuitBreaker.PARENT).getLimit()); + assertEquals(new ByteSizeValue(150, ByteSizeUnit.MB).getBytes(), requestCircuitBreaker.getLimit()); + assertEquals(new ByteSizeValue(150, ByteSizeUnit.MB).getBytes(), fieldDataCircuitBreaker.getLimit()); + + double fieldDataUsedBytes = 
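// The arithmetic this borrowing test relies on: each child stays within its
// own 150mb limit (fielddata reaches 50mb, request reaches 100mb), so
// neither child trips on its own. The parent's 200mb total is what breaks
// when the request breaker asks for a further 50mb, since
// 50mb + 100mb + 50mb would hit the 200mb ceiling, producing the
// "[parent] Data too large" message asserted at the end.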
fieldDataCircuitBreaker + .addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should not break"); + assertEquals(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), fieldDataUsedBytes, 0.0); + double requestUsedBytes = requestCircuitBreaker.addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), + "should not break"); + assertEquals(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), requestUsedBytes, 0.0); + requestUsedBytes = requestCircuitBreaker.addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), + "should not break"); + assertEquals(new ByteSizeValue(100, ByteSizeUnit.MB).getBytes(), requestUsedBytes, 0.0); + CircuitBreakingException exception = expectThrows(CircuitBreakingException.class, () -> requestCircuitBreaker + .addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should break")); + assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [should break] would be")); + assertThat(exception.getMessage(), containsString("which is larger than the limit of [209715200/200mb]")); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 816c0464d95d9..13e1489795996 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -193,14 +193,55 @@ protected Map, Object>> pluginScripts() { return newAggregation; }); + scripts.put("state.items = new ArrayList()", vars -> + aggContextScript(vars, state -> ((HashMap) state).put("items", new ArrayList()))); + + scripts.put("state.items.add(1)", vars -> + aggContextScript(vars, state -> { + HashMap stateMap = (HashMap) state; + List items = (List) stateMap.get("items"); + items.add(1); + })); + + scripts.put("sum context state values", vars -> { + int sum = 0; + HashMap state = (HashMap) vars.get("state"); + List items = (List) state.get("items"); + + for (Object x : items) { + sum += (Integer)x; + } + + return sum; + }); + + scripts.put("sum context states", vars -> { + Integer sum = 0; + + List states = (List) vars.get("states"); + for (Object state : states) { + sum += ((Number) state).intValue(); + } + + return sum; + }); + return scripts; } - @SuppressWarnings("unchecked") static Object aggScript(Map vars, Consumer fn) { - T agg = (T) vars.get("_agg"); - fn.accept(agg); - return agg; + return aggScript(vars, fn, "_agg"); + } + + static Object aggContextScript(Map vars, Consumer fn) { + return aggScript(vars, fn, "state"); + } + + @SuppressWarnings("unchecked") + private static Object aggScript(Map vars, Consumer fn, String stateVarName) { + T aggState = (T) vars.get(stateVarName); + fn.accept(aggState); + return aggState; } } @@ -1015,4 +1056,37 @@ public void testConflictingAggAndScriptParams() { SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters")); } + + public void testAggFromContext() { + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items = new ArrayList()", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items.add(1)", 
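// How the pieces of this test fit together: with the mock script plugin,
// the inline script source doubles as the lookup key into pluginScripts()
// above. The new state-based flow is: the init script seeds "state" with an
// empty items list, the map script appends a 1 per document, the combine
// script folds each shard's state into one number ("sum context state
// values"), and the reduce script sums those per-shard numbers ("sum
// context states") into the single Integer asserted against numDocs below.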
Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context state values", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context states", + Collections.emptyMap()); + + SearchResponse response = client() + .prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted") + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) + .get(); + + Aggregation aggregation = response.getAggregations().get("scripted"); + assertThat(aggregation, notNullValue()); + assertThat(aggregation, instanceOf(ScriptedMetric.class)); + + ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation; + assertThat(scriptedMetricAggregation.getName(), equalTo("scripted")); + assertThat(scriptedMetricAggregation.aggregation(), notNullValue()); + + assertThat(scriptedMetricAggregation.aggregation(), instanceOf(Integer.class)); + Integer aggResult = (Integer) scriptedMetricAggregation.aggregation(); + long totalAgg = aggResult.longValue(); + assertThat(totalAgg, equalTo(numDocs)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index 0989b1ce6a3fa..49ea48ce3bf9b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.script.MockScriptEngine; -import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; @@ -100,7 +99,7 @@ public static void initMockScripts() { }); SCRIPTS.put("mapScriptScore", params -> { Map agg = (Map) params.get("_agg"); - ((List) agg.get("collector")).add(((ScoreAccessor) params.get("_score")).doubleValue()); + ((List) agg.get("collector")).add(((Number) params.get("_score")).doubleValue()); return agg; }); SCRIPTS.put("combineScriptScore", params -> { diff --git a/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java b/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java index 70be72989cf95..6f4f69ad67e88 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/BlobStoreFormatIT.java @@ -224,7 +224,8 @@ public void testAtomicWriteFailures() throws Exception { IOException writeBlobException = expectThrows(IOException.class, () -> { BlobContainer wrapper = new BlobContainerWrapper(blobContainer) { @Override - public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlobAtomic(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) + throws IOException { throw new IOException("Exception thrown in writeBlobAtomic() for " + blobName); } }; @@ -251,10 +252,9 @@ protected void randomCorruption(BlobContainer blobContainer, String blobName) th int location = randomIntBetween(0, buffer.length - 1); buffer[location] = (byte) 
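// Why XOR with 42 works as corruption: x ^ 42 always differs from x for a
// non-zero mask, so the chosen byte is guaranteed to change, and the
// enclosing do/while re-rolls the location until the checksum really
// diverges from the original.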
(buffer[location] ^ 42); } while (originalChecksum == checksum(buffer)); - blobContainer.deleteBlob(blobName); // delete original before writing new blob BytesArray bytesArray = new BytesArray(buffer); try (StreamInput stream = bytesArray.streamInput()) { - blobContainer.writeBlob(blobName, stream, bytesArray.length()); + blobContainer.writeBlob(blobName, stream, bytesArray.length(), false); } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java b/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java index b5c6339724123..5666869a1aa0b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java +++ b/server/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java @@ -49,13 +49,14 @@ public InputStream readBlob(String name) throws IOException { } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - delegate.writeBlob(blobName, inputStream, blobSize); + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { + delegate.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); } @Override - public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize) throws IOException { - delegate.writeBlobAtomic(blobName, inputStream, blobSize); + public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, + boolean failIfAlreadyExists) throws IOException { + delegate.writeBlobAtomic(blobName, inputStream, blobSize, failIfAlreadyExists); } @Override diff --git a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index d0702acf10373..d05a10905d858 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -346,9 +346,9 @@ public Map listBlobsByPrefix(String blobNamePrefix) throws } @Override - public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException { maybeIOExceptionOrBlock(blobName); - super.writeBlob(blobName, inputStream, blobSize); + super.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); if (RandomizedContext.current().getRandom().nextBoolean()) { // for network based repositories, the blob may have been written but we may still // get an error with the client connection, so an IOException here simulates this @@ -357,27 +357,28 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize) t } @Override - public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize) throws IOException { + public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, + final boolean failIfAlreadyExists) throws IOException { final Random random = RandomizedContext.current().getRandom(); if (allowAtomicOperations && random.nextBoolean()) { if ((delegate() instanceof FsBlobContainer) && (random.nextBoolean())) { // Simulate a failure between the write and move operation in FsBlobContainer final String tempBlobName = FsBlobContainer.tempBlobName(blobName); - 
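// The pattern being stress-tested here: FsBlobContainer implements an
// atomic write as "write a temp blob, then move it into place", so
// injecting a failure between the two steps (the maybeIOExceptionOrBlock
// call below) simulates a crash that leaves only the temp blob behind.
// Note how the new failIfAlreadyExists flag is threaded through both the
// temp write and the final moveBlobAtomic.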
super.writeBlob(tempBlobName, inputStream, blobSize); + super.writeBlob(tempBlobName, inputStream, blobSize, failIfAlreadyExists); maybeIOExceptionOrBlock(blobName); final FsBlobContainer fsBlobContainer = (FsBlobContainer) delegate(); - fsBlobContainer.moveBlobAtomic(tempBlobName, blobName); + fsBlobContainer.moveBlobAtomic(tempBlobName, blobName, failIfAlreadyExists); } else { // Atomic write since it is potentially supported // by the delegating blob container maybeIOExceptionOrBlock(blobName); - super.writeBlobAtomic(blobName, inputStream, blobSize); + super.writeBlobAtomic(blobName, inputStream, blobSize, failIfAlreadyExists); } } else { // Simulate a non-atomic write since many blob container // implementations does not support atomic write maybeIOExceptionOrBlock(blobName); - super.writeBlob(blobName, inputStream, blobSize); + super.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists); } } } diff --git a/settings.gradle b/settings.gradle index b11c984b4f074..510591005924f 100644 --- a/settings.gradle +++ b/settings.gradle @@ -123,3 +123,6 @@ if (extraProjects.exists()) { addSubProjects('', extraProjectDir) } } + +// enable in preparation for Gradle 5.0 +enableFeaturePreview('STABLE_PUBLISHING') diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 43a62bbe662cc..9f12c36999145 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -61,7 +61,12 @@ public void testWriteRead() throws IOException { try(BlobStore store = newBlobStore()) { final BlobContainer container = store.blobContainer(new BlobPath()); byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); - writeBlob(container, "foobar", new BytesArray(data)); + writeBlob(container, "foobar", new BytesArray(data), randomBoolean()); + if (randomBoolean()) { + // override file, to check if we get latest contents + data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); + writeBlob(container, "foobar", new BytesArray(data), false); + } try (InputStream stream = container.readBlob("foobar")) { BytesRefBuilder target = new BytesRefBuilder(); while (target.length() < data.length) { @@ -123,7 +128,7 @@ public void testDeleteBlob() throws IOException { byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); final BytesArray bytesArray = new BytesArray(data); - writeBlob(container, blobName, bytesArray); + writeBlob(container, blobName, bytesArray, randomBoolean()); container.deleteBlob(blobName); // should not raise // blob deleted, so should raise again @@ -149,20 +154,21 @@ public void testVerifyOverwriteFails() throws IOException { final BlobContainer container = store.blobContainer(new BlobPath()); byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16))); final BytesArray bytesArray = new BytesArray(data); - writeBlob(container, blobName, bytesArray); + writeBlob(container, blobName, bytesArray, true); // should not be able to overwrite existing blob - expectThrows(FileAlreadyExistsException.class, () -> writeBlob(container, blobName, bytesArray)); + expectThrows(FileAlreadyExistsException.class, () -> writeBlob(container, blobName, bytesArray, true)); container.deleteBlob(blobName); - 
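// Summary of the overwrite contract introduced by the new boolean, as these
// tests check it: writeBlob/writeBlobAtomic with failIfAlreadyExists == true
// throw FileAlreadyExistsException when the blob already exists, passing
// false replaces the existing contents, and a deleted blob name can be
// written again with either value, as the line below verifies.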
writeBlob(container, blobName, bytesArray); // after deleting the previous blob, we should be able to write to it again + writeBlob(container, blobName, bytesArray, true); // after deleting the previous blob, we should be able to write to it again } } - protected void writeBlob(final BlobContainer container, final String blobName, final BytesArray bytesArray) throws IOException { + protected void writeBlob(final BlobContainer container, final String blobName, final BytesArray bytesArray, + boolean failIfAlreadyExists) throws IOException { try (InputStream stream = bytesArray.streamInput()) { if (randomBoolean()) { - container.writeBlob(blobName, stream, bytesArray.length()); + container.writeBlob(blobName, stream, bytesArray.length(), failIfAlreadyExists); } else { - container.writeBlobAtomic(blobName, stream, bytesArray.length()); + container.writeBlobAtomic(blobName, stream, bytesArray.length(), failIfAlreadyExists); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java index 35a17c2a8dd83..ccc38ae362991 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreTestCase.java @@ -80,7 +80,7 @@ public static byte[] randomBytes(int length) { protected static void writeBlob(BlobContainer container, String blobName, BytesArray bytesArray) throws IOException { try (InputStream stream = bytesArray.streamInput()) { - container.writeBlob(blobName, stream, bytesArray.length()); + container.writeBlob(blobName, stream, bytesArray.length(), true); } } diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index b86cb9ff29352..e608bd13d2559 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Function; @@ -115,6 +116,18 @@ public String execute() { } else if (context.instanceClazz.equals(ScoreScript.class)) { ScoreScript.Factory factory = new MockScoreScript(script); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.InitScript.class)) { + ScriptedMetricAggContexts.InitScript.Factory factory = mockCompiled::createMetricAggInitScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.MapScript.class)) { + ScriptedMetricAggContexts.MapScript.Factory factory = mockCompiled::createMetricAggMapScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.CombineScript.class)) { + ScriptedMetricAggContexts.CombineScript.Factory factory = mockCompiled::createMetricAggCombineScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.ReduceScript.class)) { + ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ 
-179,6 +192,23 @@ public SimilarityWeightScript createSimilarityWeightScript() { public MovingFunctionScript createMovingFunctionScript() { return new MockMovingFunctionScript(); } + + public ScriptedMetricAggContexts.InitScript createMetricAggInitScript(Map params, Object state) { + return new MockMetricAggInitScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.MapScript.LeafFactory createMetricAggMapScript(Map params, Object state, + SearchLookup lookup) { + return new MockMetricAggMapScript(params, state, lookup, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map params, Object state) { + return new MockMetricAggCombineScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.ReduceScript createMetricAggReduceScript(Map params, List states) { + return new MockMetricAggReduceScript(params, states, script != null ? script : ctx -> 42d); + } } public class MockExecutableScript implements ExecutableScript { @@ -333,6 +363,108 @@ public double execute(Query query, Field field, Term term) throws IOException { } } + public static class MockMetricAggInitScript extends ScriptedMetricAggContexts.InitScript { + private final Function, Object> script; + + MockMetricAggInitScript(Map params, Object state, + Function, Object> script) { + super(params, state); + this.script = script; + } + + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + script.apply(map); + } + } + + public static class MockMetricAggMapScript implements ScriptedMetricAggContexts.MapScript.LeafFactory { + private final Map params; + private final Object state; + private final SearchLookup lookup; + private final Function, Object> script; + + MockMetricAggMapScript(Map params, Object state, SearchLookup lookup, + Function, Object> script) { + this.params = params; + this.state = state; + this.lookup = lookup; + this.script = script; + } + + @Override + public ScriptedMetricAggContexts.MapScript newInstance(LeafReaderContext context) { + return new ScriptedMetricAggContexts.MapScript(params, state, lookup, context) { + @Override + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + map.put("doc", getDoc()); + map.put("_score", get_score()); + + script.apply(map); + } + }; + } + } + + public static class MockMetricAggCombineScript extends ScriptedMetricAggContexts.CombineScript { + private final Function, Object> script; + + MockMetricAggCombineScript(Map params, Object state, + Function, Object> script) { + super(params, state); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + return script.apply(map); + } + } + + public static class MockMetricAggReduceScript extends ScriptedMetricAggContexts.ReduceScript { + private final Function, Object> script; + + MockMetricAggReduceScript(Map params, List states, + Function, 
Object> script) { + super(params, states); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("states", getStates()); + return script.apply(map); + } + } + public static Script mockInlineScript(final String script) { return new Script(ScriptType.INLINE, "mock", script, emptyMap()); } @@ -343,15 +475,15 @@ public double execute(Map params, double[] values) { return MovingFunctions.unweightedAvg(values); } } - + public class MockScoreScript implements ScoreScript.Factory { - + private final Function, Object> scripts; - + MockScoreScript(Function, Object> scripts) { this.scripts = scripts; } - + @Override public ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup) { return new ScoreScript.LeafFactory() { @@ -359,7 +491,7 @@ public ScoreScript.LeafFactory newFactory(Map params, SearchLook public boolean needs_score() { return true; } - + @Override public ScoreScript newInstance(LeafReaderContext ctx) throws IOException { Scorer[] scorerHolder = new Scorer[1]; @@ -373,7 +505,7 @@ public double execute() { } return ((Number) scripts.apply(vars)).doubleValue(); } - + @Override public void setScorer(Scorer scorer) { scorerHolder[0] = scorer; diff --git a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc b/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc index b2e24a298cbd0..d343cc23ae0ad 100644 --- a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc +++ b/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc @@ -20,6 +20,10 @@ progress of a job. (object) An object that provides information about the size and contents of the model. See <> +`forecasts_stats`:: + (object) An object that provides statistical information about forecasts + of this job. See <> + `node`:: (object) For open jobs only, contains information about the node where the job runs. See <>. @@ -177,6 +181,33 @@ NOTE: The `over` field values are counted separately for each detector and parti `timestamp`:: (date) The timestamp of the `model_size_stats` according to the timestamp of the data. +[float] +[[ml-forecastsstats]] +==== Forecasts Stats Objects + +The `forecasts_stats` object shows statistics about forecasts. It has the following properties: + +`total`:: + (long) The number of forecasts currently available for this model. + +`forecasted_jobs`:: + (long) The number of jobs that have at least one forecast. + +`memory_bytes`:: + (object) Statistics about the memory usage: minimum, maximum, average and total. + +`records`:: + (object) Statistics about the number of forecast records: minimum, maximum, average and total. + +`processing_time_ms`:: + (object) Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total. + +`status`:: + (object) Counts per forecast status, for example: {"finished" : 2}. + +NOTE: `memory_bytes`, `records`, `processing_time_ms` and `status` require at least 1 forecast, otherwise +these fields are omitted. 
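To make the shape of these statistics sub-objects concrete, here is a small illustrative sketch using the two accumulator classes this change adds further down (StatsAccumulator and CountAccumulator). The values are invented, and the snippet assumes the new x-pack core classes are on the classpath.

import org.elasticsearch.xpack.core.ml.stats.CountAccumulator;
import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator;

public class ForecastsStatsShape {
    public static void main(String[] args) {
        // two hypothetical forecasts for one job
        StatsAccumulator memory = new StatsAccumulator();
        memory.add(2048.0);  // memory_bytes of forecast #1
        memory.add(4096.0);  // memory_bytes of forecast #2
        // the map holds min=2048.0, max=4096.0, avg=3072.0, total=6144.0
        System.out.println(memory.asMap());

        CountAccumulator statuses = new CountAccumulator();
        statuses.add("finished", 2L);
        // the map holds finished=2, matching the "status" example above
        System.out.println(statuses.asMap());
    }
}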
+ [float] [[ml-stats-node]] ==== Node Objects diff --git a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java index fba0e478dbf5e..3d01594e6d730 100644 --- a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java +++ b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java @@ -107,7 +107,7 @@ public void reenableWatcher() throws Exception { @Override protected boolean isWatcherTest() { String testName = getTestName(); - return testName != null && testName.contains("watcher/"); + return testName != null && (testName.contains("watcher/") || testName.contains("watcher\\")); } @Override @@ -118,13 +118,13 @@ protected boolean isMonitoringTest() { @Override protected boolean isMachineLearningTest() { String testName = getTestName(); - return testName != null && testName.contains("ml/"); + return testName != null && (testName.contains("ml/") || testName.contains("ml\\")); } @Override protected boolean isRollupTest() { String testName = getTestName(); - return testName != null && testName.contains("rollup/"); + return testName != null && (testName.contains("rollup/") || testName.contains("rollup\\")); } /** diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index eb1a611297d19..431ed1e513759 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -36,7 +36,7 @@ subprojects { // default to main class files if such a source set exists final List files = [] if (project.sourceSets.findByName("main")) { - files.add(project.sourceSets.main.output.classesDir) + files.add(project.sourceSets.main.output.classesDirs) dependsOn project.tasks.classes } // filter out non-existent classes directories from empty source sets diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java index 1779ca703a5d7..ebcaab8495eba 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MachineLearningFeatureSetUsage.java @@ -22,6 +22,7 @@ public class MachineLearningFeatureSetUsage extends XPackFeatureSet.Usage { public static final String DATAFEEDS_FIELD = "datafeeds"; public static final String COUNT = "count"; public static final String DETECTORS = "detectors"; + public static final String FORECASTS = "forecasts"; public static final String MODEL_SIZE = "model_size"; private final Map jobsUsage; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index 37e41854f7b8b..eb7aaee8a56e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import 
java.io.IOException; @@ -46,6 +47,7 @@ public class GetJobsStatsAction extends Action createParser(boolean ignoreUnknownFie private final SortedSet items; private MlFilter(String id, String description, SortedSet items) { - this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); + this.id = Objects.requireNonNull(id); this.description = description; - this.items = Objects.requireNonNull(items, ITEMS.getPreferredName() + " must not be null"); + this.items = Objects.requireNonNull(items); } public MlFilter(StreamInput in) throws IOException { @@ -178,6 +190,14 @@ public Builder setItems(String... items) { } public MlFilter build() { + ExceptionsHelper.requireNonNull(id, MlFilter.ID.getPreferredName()); + ExceptionsHelper.requireNonNull(items, MlFilter.ITEMS.getPreferredName()); + if (!MlStrings.isValidId(id)) { + throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName(), id)); + } + if (items.size() > MAX_ITEMS) { + throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.FILTER_CONTAINS_TOO_MANY_ITEMS, id, MAX_ITEMS)); + } return new MlFilter(id, description, items); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index f0329051fed95..259d2d06a9c6e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -43,6 +43,7 @@ public final class Messages { "Datafeed frequency [{0}] must be a multiple of the aggregation interval [{1}]"; public static final String FILTER_NOT_FOUND = "No filter with id [{0}] exists"; + public static final String FILTER_CONTAINS_TOO_MANY_ITEMS = "Filter [{0}] contains too many items; up to [{1}] items are allowed"; public static final String INCONSISTENT_ID = "Inconsistent {0}; ''{1}'' specified in the body differs from ''{2}'' specified as a URL argument"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/CountAccumulator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/CountAccumulator.java new file mode 100644 index 0000000000000..638aa8a2fa6be --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/CountAccumulator.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml.stats; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.Map.Entry; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * An accumulator for simple counts where statistical measures + * are not of interest. 
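 * <p>
 * A worked example, using the values from the merge test at the end of this
 * change: merging counts {a=13, b=42} with {a=12, c=-1} yields
 * {a=25, b=42, c=-1}; counts for shared keys are summed and keys present in
 * only one accumulator are carried over unchanged.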
+ */ +public class CountAccumulator implements Writeable { + + private Map counts; + + public CountAccumulator() { + this.counts = new HashMap(); + } + + private CountAccumulator(Map counts) { + this.counts = counts; + } + + public CountAccumulator(StreamInput in) throws IOException { + this.counts = in.readMap(StreamInput::readString, StreamInput::readLong); + } + + public void merge(CountAccumulator other) { + counts = Stream.of(counts, other.counts).flatMap(m -> m.entrySet().stream()) + .collect(Collectors.toMap(Entry::getKey, Entry::getValue, (x, y) -> x + y)); + } + + public void add(String key, Long count) { + counts.put(key, counts.getOrDefault(key, 0L) + count); + } + + public Map asMap() { + return counts; + } + + public static CountAccumulator fromTermsAggregation(StringTerms termsAggregation) { + return new CountAccumulator(termsAggregation.getBuckets().stream() + .collect(Collectors.toMap(bucket -> bucket.getKeyAsString(), bucket -> bucket.getDocCount()))); + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(counts, StreamOutput::writeString, StreamOutput::writeLong); + } + + @Override + public int hashCode() { + return Objects.hash(counts); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + CountAccumulator other = (CountAccumulator) obj; + return Objects.equals(counts, other.counts); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/ForecastStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/ForecastStats.java new file mode 100644 index 0000000000000..d490e4b98a44a --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/ForecastStats.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml.stats; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * A class to hold statistics about forecasts. + */ +public class ForecastStats implements ToXContentObject, Writeable { + + public static class Fields { + public static final String TOTAL = "total"; + public static final String FORECASTED_JOBS = "forecasted_jobs"; + public static final String MEMORY = "memory_bytes"; + public static final String RUNTIME = "processing_time_ms"; + public static final String RECORDS = "records"; + public static final String STATUSES = "status"; + } + + private long total; + private long forecastedJobs; + private StatsAccumulator memoryStats; + private StatsAccumulator recordStats; + private StatsAccumulator runtimeStats; + private CountAccumulator statusCounts; + + public ForecastStats() { + this.total = 0; + this.forecastedJobs = 0; + this.memoryStats = new StatsAccumulator(); + this.recordStats = new StatsAccumulator(); + this.runtimeStats = new StatsAccumulator(); + this.statusCounts = new CountAccumulator(); + } + + /* + * Construct ForecastStats for 1 job. 
Additional statistics can be added by merging other ForecastStats into it. + */ + public ForecastStats(long total, StatsAccumulator memoryStats, StatsAccumulator recordStats, StatsAccumulator runtimeStats, + CountAccumulator statusCounts) { + this.total = total; + this.forecastedJobs = total > 0 ? 1 : 0; + this.memoryStats = Objects.requireNonNull(memoryStats); + this.recordStats = Objects.requireNonNull(recordStats); + this.runtimeStats = Objects.requireNonNull(runtimeStats); + this.statusCounts = Objects.requireNonNull(statusCounts); + } + + public ForecastStats(StreamInput in) throws IOException { + this.total = in.readLong(); + this.forecastedJobs = in.readLong(); + this.memoryStats = new StatsAccumulator(in); + this.recordStats = new StatsAccumulator(in); + this.runtimeStats = new StatsAccumulator(in); + this.statusCounts = new CountAccumulator(in); + } + + public ForecastStats merge(ForecastStats other) { + if (other == null) { + return this; + } + total += other.total; + forecastedJobs += other.forecastedJobs; + memoryStats.merge(other.memoryStats); + recordStats.merge(other.recordStats); + runtimeStats.merge(other.runtimeStats); + statusCounts.merge(other.statusCounts); + + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + doXContentBody(builder, params); + return builder.endObject(); + } + + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.TOTAL, total); + builder.field(Fields.FORECASTED_JOBS, forecastedJobs); + + if (total > 0) { + builder.field(Fields.MEMORY, memoryStats.asMap()); + builder.field(Fields.RECORDS, recordStats.asMap()); + builder.field(Fields.RUNTIME, runtimeStats.asMap()); + builder.field(Fields.STATUSES, statusCounts.asMap()); + } + + return builder; + } + + public Map asMap() { + Map map = new HashMap<>(); + map.put(Fields.TOTAL, total); + map.put(Fields.FORECASTED_JOBS, forecastedJobs); + + if (total > 0) { + map.put(Fields.MEMORY, memoryStats.asMap()); + map.put(Fields.RECORDS, recordStats.asMap()); + map.put(Fields.RUNTIME, runtimeStats.asMap()); + map.put(Fields.STATUSES, statusCounts.asMap()); + } + + return map; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(total); + out.writeLong(forecastedJobs); + memoryStats.writeTo(out); + recordStats.writeTo(out); + runtimeStats.writeTo(out); + statusCounts.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + ForecastStats other = (ForecastStats) obj; + return Objects.equals(total, other.total) && Objects.equals(forecastedJobs, other.forecastedJobs) + && Objects.equals(memoryStats, other.memoryStats) && Objects.equals(recordStats, other.recordStats) + && Objects.equals(runtimeStats, other.runtimeStats) && Objects.equals(statusCounts, other.statusCounts); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java new file mode 100644 index 0000000000000..fe987db48ce17 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java @@ -0,0 
+1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.stats; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.aggregations.metrics.stats.Stats; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Helper class to collect min, max, avg and total statistics for a quantity + */ +public class StatsAccumulator implements Writeable { + + public static class Fields { + public static final String MIN = "min"; + public static final String MAX = "max"; + public static final String AVG = "avg"; + public static final String TOTAL = "total"; + } + + private long count; + private double total; + private Double min; + private Double max; + + public StatsAccumulator() { + } + + public StatsAccumulator(StreamInput in) throws IOException { + count = in.readLong(); + total = in.readDouble(); + min = in.readOptionalDouble(); + max = in.readOptionalDouble(); + } + + private StatsAccumulator(long count, double total, double min, double max) { + this.count = count; + this.total = total; + this.min = min; + this.max = max; + } + + public void add(double value) { + count++; + total += value; + min = min == null ? value : (value < min ? value : min); + max = max == null ? value : (value > max ? value : max); + } + + public double getMin() { + return min == null ? 0.0 : min; + } + + public double getMax() { + return max == null ? 0.0 : max; + } + + public double getAvg() { + return count == 0.0 ? 0.0 : total/count; + } + + public double getTotal() { + return total; + } + + public void merge(StatsAccumulator other) { + count += other.count; + total += other.total; + + // note: not using Math.min/max as some internal prefetch optimization causes an NPE + min = min == null ? other.min : (other.min == null ? min : other.min < min ? other.min : min); + max = max == null ? other.max : (other.max == null ? max : other.max > max ? 
other.max : max); + } + + public Map asMap() { + Map map = new HashMap<>(); + map.put(Fields.MIN, getMin()); + map.put(Fields.MAX, getMax()); + map.put(Fields.AVG, getAvg()); + map.put(Fields.TOTAL, getTotal()); + return map; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(count); + out.writeDouble(total); + out.writeOptionalDouble(min); + out.writeOptionalDouble(max); + } + + public static StatsAccumulator fromStatsAggregation(Stats statsAggregation) { + return new StatsAccumulator(statsAggregation.getCount(), statsAggregation.getSum(), statsAggregation.getMin(), + statsAggregation.getMax()); + } + + @Override + public int hashCode() { + return Objects.hash(count, total, min, max); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + StatsAccumulator other = (StatsAccumulator) obj; + return Objects.equals(count, other.count) && Objects.equals(total, other.total) && Objects.equals(min, other.min) + && Objects.equals(max, other.max); + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java index 0f91abac2a73e..731d59a3ac078 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.nio.file.Path; import java.security.GeneralSecurityException; +import java.security.KeyStore; import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -194,9 +195,9 @@ private static KeyConfig createKeyConfig(Settings settings, SSLConfiguration glo if (System.getProperty("javax.net.ssl.keyStore") != null) { // TODO: we should not support loading a keystore from sysprops... 
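// Context for the change below: since JDK 9 the JVM's default keystore type
// is PKCS12 rather than JKS, so a keystore referenced via
// javax.net.ssl.keyStore is not necessarily a JKS file. Replacing the
// hardcoded "jks" with KeyStore.getDefaultType() keeps this system-property
// fallback aligned with whatever format the runtime would create by default.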
try (SecureString keystorePassword = new SecureString(System.getProperty("javax.net.ssl.keyStorePassword", ""))) { - return new StoreKeyConfig(System.getProperty("javax.net.ssl.keyStore"), "jks", keystorePassword, keystorePassword, - System.getProperty("ssl.KeyManagerFactory.algorithm", KeyManagerFactory.getDefaultAlgorithm()), - System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm())); + return new StoreKeyConfig(System.getProperty("javax.net.ssl.keyStore"), KeyStore.getDefaultType(), keystorePassword, + keystorePassword, System.getProperty("ssl.KeyManagerFactory.algorithm", KeyManagerFactory.getDefaultAlgorithm()), + System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm())); } } return KeyConfig.NONE; @@ -234,7 +235,7 @@ private static TrustConfig createCertChainTrustConfig(Settings settings, KeyConf return new StoreTrustConfig(trustStorePath, trustStoreType, trustStorePassword, trustStoreAlgorithm); } else if (global == null && System.getProperty("javax.net.ssl.trustStore") != null) { try (SecureString truststorePassword = new SecureString(System.getProperty("javax.net.ssl.trustStorePassword", ""))) { - return new StoreTrustConfig(System.getProperty("javax.net.ssl.trustStore"), "jks", truststorePassword, + return new StoreTrustConfig(System.getProperty("javax.net.ssl.trustStore"), KeyStore.getDefaultType(), truststorePassword, System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm())); } } else if (global != null && keyConfig == global.keyConfig()) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionResponseTests.java index ff979a8570aba..86a5b990728f8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionResponseTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCountsTests; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStatsTests; import java.net.InetAddress; import java.util.ArrayList; @@ -42,6 +44,12 @@ protected Response createTestInstance() { if (randomBoolean()) { sizeStats = new ModelSizeStats.Builder("foo").build(); } + + ForecastStats forecastStats = null; + if (randomBoolean()) { + forecastStats = new ForecastStatsTests().createTestInstance(); + } + JobState jobState = randomFrom(EnumSet.allOf(JobState.class)); DiscoveryNode node = null; @@ -56,7 +64,8 @@ protected Response createTestInstance() { if (randomBoolean()) { openTime = parseTimeValue(randomPositiveTimeValue(), "open_time-Test"); } - Response.JobStats jobStats = new Response.JobStats(jobId, dataCounts, sizeStats, jobState, node, explanation, openTime); + Response.JobStats jobStats = new Response.JobStats(jobId, dataCounts, sizeStats, forecastStats, jobState, node, explanation, + openTime); jobStatsList.add(jobStats); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionRequestTests.java index dfc3f5f37f40c..bed0ab775af12 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionRequestTests.java @@ -12,7 +12,7 @@ public class PutFilterActionRequestTests extends AbstractStreamableXContentTestCase { - private final String filterId = randomAlphaOfLengthBetween(1, 20); + private final String filterId = MlFilterTests.randomValidFilterId(); @Override protected Request createTestInstance() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java index c8d8527dc0158..45ba47281a2a1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java @@ -5,18 +5,23 @@ */ package org.elasticsearch.xpack.core.ml.job.config; +import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; public class MlFilterTests extends AbstractSerializingTestCase { @@ -30,7 +35,12 @@ protected MlFilter createTestInstance() { } public static MlFilter createRandom() { - return createRandom(randomAlphaOfLengthBetween(1, 20)); + return createRandom(randomValidFilterId()); + } + + public static String randomValidFilterId() { + CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray()); + return generator.ofCodePointsLength(random(), 10, 10); } public static MlFilter createRandom(String filterId) { @@ -58,14 +68,14 @@ protected MlFilter doParseInstance(XContentParser parser) { } public void testNullId() { - NullPointerException ex = expectThrows(NullPointerException.class, () -> MlFilter.builder(null).build()); - assertEquals(MlFilter.ID.getPreferredName() + " must not be null", ex.getMessage()); + Exception ex = expectThrows(IllegalArgumentException.class, () -> MlFilter.builder(null).build()); + assertEquals("[filter_id] must not be null.", ex.getMessage()); } public void testNullItems() { - NullPointerException ex = expectThrows(NullPointerException.class, - () -> MlFilter.builder(randomAlphaOfLength(20)).setItems((SortedSet) null).build()); - assertEquals(MlFilter.ITEMS.getPreferredName() + " must not be null", ex.getMessage()); + Exception ex = expectThrows(IllegalArgumentException.class, + () -> MlFilter.builder(randomValidFilterId()).setItems((SortedSet) null).build()); + assertEquals("[items] must not be null.", ex.getMessage()); } public void testDocumentId() { @@ -89,6 +99,32 @@ public void testLenientParser() throws IOException { } } + public void testInvalidId() { + 
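// The two validations added to MlFilter.Builder#build() are covered by this
// test and the two that follow: the filter_id must be a valid ML id (the
// asserted message says it "can contain lowercase" characters, so an id with
// uppercase letters and a space is rejected), and a filter may hold at most
// 10,000 items, with 10,000 accepted and 10,001 rejected.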
ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> MlFilter.builder("Invalid id").build()); + assertThat(e.getMessage(), startsWith("Invalid filter_id; 'Invalid id' can contain lowercase")); + } + + public void testTooManyItems() { + List<String> items = new ArrayList<>(10001); + for (int i = 0; i < 10001; ++i) { + items.add("item_" + i); + } + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, + () -> MlFilter.builder("huge").setItems(items).build()); + assertThat(e.getMessage(), startsWith("Filter [huge] contains too many items")); + } + + public void testGivenItemsAreMaxAllowed() { + List<String> items = new ArrayList<>(10000); + for (int i = 0; i < 10000; ++i) { + items.add("item_" + i); + } + + MlFilter hugeFilter = MlFilter.builder("huge").setItems(items).build(); + + assertThat(hugeFilter.getItems().size(), equalTo(items.size())); + } + public void testItemsAreSorted() { MlFilter filter = MlFilter.builder("foo").setItems("c", "b", "a").build(); assertThat(filter.getItems(), contains("a", "b", "c")); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/CountAccumulatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/CountAccumulatorTests.java new file mode 100644 index 0000000000000..4e18a70a3a0a2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/CountAccumulatorTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml.stats; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms.Bucket; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CountAccumulatorTests extends AbstractWireSerializingTestCase<CountAccumulator> { + + public void testEmpty() { + CountAccumulator accumulator = new CountAccumulator(); + assertEquals(Collections.emptyMap(), accumulator.asMap()); + } + + public void testAdd() { + CountAccumulator accumulator = new CountAccumulator(); + accumulator.add("a", 22L); + accumulator.add("a", 10L); + accumulator.add("a", 15L); + accumulator.add("a", -12L); + accumulator.add("a", 0L); + + accumulator.add("b", 13L); + accumulator.add("b", 1L); + accumulator.add("b", 40000L); + accumulator.add("b", -2L); + accumulator.add("b", 333L); + + assertEquals(35L, accumulator.asMap().get("a").longValue()); + assertEquals(40345L, accumulator.asMap().get("b").longValue()); + assertEquals(2, accumulator.asMap().size()); + } + + public void testMerge() { + CountAccumulator accumulator = new CountAccumulator(); + accumulator.add("a", 13L); + accumulator.add("b", 42L); + + CountAccumulator accumulator2 = new CountAccumulator(); + accumulator2.add("a", 12L); + accumulator2.add("c", -1L); + + accumulator.merge(accumulator2); + + assertEquals(25L, accumulator.asMap().get("a").longValue()); + assertEquals(42L, accumulator.asMap().get("b").longValue()); + assertEquals(-1L, accumulator.asMap().get("c").longValue()); + assertEquals(3, accumulator.asMap().size()); 
} + + public void testFromTermsAggregation() { + StringTerms termsAggregation = mock(StringTerms.class); + + Bucket bucket1 = mock(Bucket.class); + when(bucket1.getKeyAsString()).thenReturn("a"); + when(bucket1.getDocCount()).thenReturn(10L); + + Bucket bucket2 = mock(Bucket.class); + when(bucket2.getKeyAsString()).thenReturn("b"); + when(bucket2.getDocCount()).thenReturn(33L); + + List<Bucket> buckets = Arrays.asList(bucket1, bucket2); + when(termsAggregation.getBuckets()).thenReturn(buckets); + + CountAccumulator accumulator = CountAccumulator.fromTermsAggregation(termsAggregation); + + assertEquals(10L, accumulator.asMap().get("a").longValue()); + assertEquals(33L, accumulator.asMap().get("b").longValue()); + assertEquals(2, accumulator.asMap().size()); + } + + @Override + public CountAccumulator createTestInstance() { + CountAccumulator accumulator = new CountAccumulator(); + for (int i = 0; i < randomInt(10); ++i) { + accumulator.add(randomAlphaOfLengthBetween(1, 20), randomLongBetween(1L, 100L)); + } + + return accumulator; + } + + @Override + protected Reader<CountAccumulator> instanceReader() { + return CountAccumulator::new; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/ForecastStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/ForecastStatsTests.java new file mode 100644 index 0000000000000..f7f5d16c5e578 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/ForecastStatsTests.java @@ -0,0 +1,254 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml.stats; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats.Fields; + +import java.io.IOException; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class ForecastStatsTests extends AbstractWireSerializingTestCase<ForecastStats> { + + public void testEmpty() throws IOException { + ForecastStats forecastStats = new ForecastStats(); + + XContentBuilder builder = JsonXContent.contentBuilder(); + forecastStats.toXContent(builder, ToXContent.EMPTY_PARAMS); + + XContentParser parser = createParser(builder); + Map<String, Object> properties = parser.map(); + assertTrue(properties.containsKey(Fields.TOTAL)); + assertTrue(properties.containsKey(Fields.FORECASTED_JOBS)); + assertFalse(properties.containsKey(Fields.MEMORY)); + assertFalse(properties.containsKey(Fields.RECORDS)); + assertFalse(properties.containsKey(Fields.RUNTIME)); + assertFalse(properties.containsKey(Fields.STATUSES)); + } + + public void testMerge() { + StatsAccumulator memoryStats = new StatsAccumulator(); + memoryStats.add(1000); + memoryStats.add(45000); + memoryStats.add(2300); + + StatsAccumulator recordStats = new StatsAccumulator(); + recordStats.add(10); + recordStats.add(0); + recordStats.add(20); + + StatsAccumulator runtimeStats = new StatsAccumulator(); + runtimeStats.add(0); + runtimeStats.add(0); + 
runtimeStats.add(10); + + CountAccumulator statusStats = new CountAccumulator(); + statusStats.add("finished", 2L); + statusStats.add("failed", 5L); + + ForecastStats forecastStats = new ForecastStats(3, memoryStats, recordStats, runtimeStats, statusStats); + + StatsAccumulator memoryStats2 = new StatsAccumulator(); + memoryStats2.add(10); + memoryStats2.add(30); + + StatsAccumulator recordStats2 = new StatsAccumulator(); + recordStats2.add(10); + recordStats2.add(0); + + StatsAccumulator runtimeStats2 = new StatsAccumulator(); + runtimeStats2.add(96); + runtimeStats2.add(0); + + CountAccumulator statusStats2 = new CountAccumulator(); + statusStats2.add("finished", 2L); + statusStats2.add("scheduled", 1L); + + ForecastStats forecastStats2 = new ForecastStats(2, memoryStats2, recordStats2, runtimeStats2, statusStats2); + + forecastStats.merge(forecastStats2); + + Map<String, Object> mergedStats = forecastStats.asMap(); + + assertEquals(2L, mergedStats.get(Fields.FORECASTED_JOBS)); + assertEquals(5L, mergedStats.get(Fields.TOTAL)); + + @SuppressWarnings("unchecked") + Map<String, Double> mergedMemoryStats = (Map<String, Double>) mergedStats.get(Fields.MEMORY); + + assertTrue(mergedMemoryStats != null); + assertThat(mergedMemoryStats.get(StatsAccumulator.Fields.AVG), equalTo(9668.0)); + assertThat(mergedMemoryStats.get(StatsAccumulator.Fields.MAX), equalTo(45000.0)); + assertThat(mergedMemoryStats.get(StatsAccumulator.Fields.MIN), equalTo(10.0)); + + @SuppressWarnings("unchecked") + Map<String, Double> mergedRecordStats = (Map<String, Double>) mergedStats.get(Fields.RECORDS); + + assertTrue(mergedRecordStats != null); + assertThat(mergedRecordStats.get(StatsAccumulator.Fields.AVG), equalTo(8.0)); + assertThat(mergedRecordStats.get(StatsAccumulator.Fields.MAX), equalTo(20.0)); + assertThat(mergedRecordStats.get(StatsAccumulator.Fields.MIN), equalTo(0.0)); + + @SuppressWarnings("unchecked") + Map<String, Double> mergedRuntimeStats = (Map<String, Double>) mergedStats.get(Fields.RUNTIME); + + assertTrue(mergedRuntimeStats != null); + assertThat(mergedRuntimeStats.get(StatsAccumulator.Fields.AVG), equalTo(21.2)); + assertThat(mergedRuntimeStats.get(StatsAccumulator.Fields.MAX), equalTo(96.0)); + assertThat(mergedRuntimeStats.get(StatsAccumulator.Fields.MIN), equalTo(0.0)); + + @SuppressWarnings("unchecked") + Map<String, Long> mergedCountStats = (Map<String, Long>) mergedStats.get(Fields.STATUSES); + + assertTrue(mergedCountStats != null); + assertEquals(3, mergedCountStats.size()); + assertEquals(4, mergedCountStats.get("finished").longValue()); + assertEquals(5, mergedCountStats.get("failed").longValue()); + assertEquals(1, mergedCountStats.get("scheduled").longValue()); + } + + public void testChainedMerge() { + StatsAccumulator memoryStats = new StatsAccumulator(); + memoryStats.add(1000); + memoryStats.add(45000); + memoryStats.add(2300); + StatsAccumulator recordStats = new StatsAccumulator(); + recordStats.add(10); + recordStats.add(0); + recordStats.add(20); + StatsAccumulator runtimeStats = new StatsAccumulator(); + runtimeStats.add(0); + runtimeStats.add(0); + runtimeStats.add(10); + CountAccumulator statusStats = new CountAccumulator(); + statusStats.add("finished", 2L); + statusStats.add("failed", 5L); + ForecastStats forecastStats = new ForecastStats(3, memoryStats, recordStats, runtimeStats, statusStats); + + StatsAccumulator memoryStats2 = new StatsAccumulator(); + memoryStats2.add(10); + memoryStats2.add(30); + StatsAccumulator recordStats2 = new StatsAccumulator(); + recordStats2.add(10); + recordStats2.add(0); + StatsAccumulator runtimeStats2 = new StatsAccumulator(); + runtimeStats2.add(96); + runtimeStats2.add(0); 
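+ // note: same fixture values as testMerge; a third, single-forecast stats object and an empty ForecastStats join the chain below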
+ CountAccumulator statusStats2 = new CountAccumulator(); + statusStats2.add("finished", 2L); + statusStats2.add("scheduled", 1L); + ForecastStats forecastStats2 = new ForecastStats(2, memoryStats2, recordStats2, runtimeStats2, statusStats2); + + StatsAccumulator memoryStats3 = new StatsAccumulator(); + memoryStats3.add(500); + StatsAccumulator recordStats3 = new StatsAccumulator(); + recordStats3.add(50); + StatsAccumulator runtimeStats3 = new StatsAccumulator(); + runtimeStats3.add(32); + CountAccumulator statusStats3 = new CountAccumulator(); + statusStats3.add("finished", 1L); + ForecastStats forecastStats3 = new ForecastStats(1, memoryStats3, recordStats3, runtimeStats3, statusStats3); + + ForecastStats forecastStats4 = new ForecastStats(); + + // merge 4 into 3 + forecastStats3.merge(forecastStats4); + + // merge 3 into 2 + forecastStats2.merge(forecastStats3); + + // merge 2 into 1 + forecastStats.merge(forecastStats2); + + Map<String, Object> mergedStats = forecastStats.asMap(); + + assertEquals(3L, mergedStats.get(Fields.FORECASTED_JOBS)); + assertEquals(6L, mergedStats.get(Fields.TOTAL)); + + @SuppressWarnings("unchecked") + Map<String, Double> mergedMemoryStats = (Map<String, Double>) mergedStats.get(Fields.MEMORY); + + assertTrue(mergedMemoryStats != null); + assertThat(mergedMemoryStats.get(StatsAccumulator.Fields.AVG), equalTo(8140.0)); + assertThat(mergedMemoryStats.get(StatsAccumulator.Fields.MAX), equalTo(45000.0)); + assertThat(mergedMemoryStats.get(StatsAccumulator.Fields.MIN), equalTo(10.0)); + + @SuppressWarnings("unchecked") + Map<String, Double> mergedRecordStats = (Map<String, Double>) mergedStats.get(Fields.RECORDS); + + assertTrue(mergedRecordStats != null); + assertThat(mergedRecordStats.get(StatsAccumulator.Fields.AVG), equalTo(15.0)); + assertThat(mergedRecordStats.get(StatsAccumulator.Fields.MAX), equalTo(50.0)); + assertThat(mergedRecordStats.get(StatsAccumulator.Fields.MIN), equalTo(0.0)); + + @SuppressWarnings("unchecked") + Map<String, Double> mergedRuntimeStats = (Map<String, Double>) mergedStats.get(Fields.RUNTIME); + + assertTrue(mergedRuntimeStats != null); + assertThat(mergedRuntimeStats.get(StatsAccumulator.Fields.AVG), equalTo(23.0)); + assertThat(mergedRuntimeStats.get(StatsAccumulator.Fields.MAX), equalTo(96.0)); + assertThat(mergedRuntimeStats.get(StatsAccumulator.Fields.MIN), equalTo(0.0)); + + @SuppressWarnings("unchecked") + Map<String, Long> mergedCountStats = (Map<String, Long>) mergedStats.get(Fields.STATUSES); + + assertTrue(mergedCountStats != null); + assertEquals(3, mergedCountStats.size()); + assertEquals(5, mergedCountStats.get("finished").longValue()); + assertEquals(5, mergedCountStats.get("failed").longValue()); + assertEquals(1, mergedCountStats.get("scheduled").longValue()); + } + + public void testUniqueCountOfJobs() { + ForecastStats forecastStats = createForecastStats(5, 10); + ForecastStats forecastStats2 = createForecastStats(2, 8); + ForecastStats forecastStats3 = createForecastStats(0, 0); + ForecastStats forecastStats4 = createForecastStats(0, 0); + ForecastStats forecastStats5 = createForecastStats(1, 12); + + forecastStats.merge(forecastStats2); + forecastStats.merge(forecastStats3); + forecastStats.merge(forecastStats4); + forecastStats.merge(forecastStats5); + + assertEquals(3L, forecastStats.asMap().get(Fields.FORECASTED_JOBS)); + } + + @Override + public ForecastStats createTestInstance() { + return createForecastStats(1, 22); + } + + @Override + protected Reader<ForecastStats> instanceReader() { + return ForecastStats::new; + } + + public ForecastStats createForecastStats(long minTotal, long maxTotal) { + ForecastStats forecastStats = new 
ForecastStats(randomLongBetween(minTotal, maxTotal), createStatsAccumulator(), + createStatsAccumulator(), createStatsAccumulator(), createCountAccumulator()); + + return forecastStats; + } + + private StatsAccumulator createStatsAccumulator() { + return new StatsAccumulatorTests().createTestInstance(); + } + + private CountAccumulator createCountAccumulator() { + return new CountAccumulatorTests().createTestInstance(); + + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java new file mode 100644 index 0000000000000..bd2df0823ae17 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.ml.stats; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class StatsAccumulatorTests extends AbstractWireSerializingTestCase { + + public void testGivenNoValues() { + StatsAccumulator accumulator = new StatsAccumulator(); + assertThat(accumulator.getMin(), equalTo(0.0)); + assertThat(accumulator.getMax(), equalTo(0.0)); + assertThat(accumulator.getTotal(), equalTo(0.0)); + assertThat(accumulator.getAvg(), equalTo(0.0)); + } + + public void testGivenPositiveValues() { + StatsAccumulator accumulator = new StatsAccumulator(); + + for (int i = 1; i <= 10; i++) { + accumulator.add(i); + } + + assertThat(accumulator.getMin(), equalTo(1.0)); + assertThat(accumulator.getMax(), equalTo(10.0)); + assertThat(accumulator.getTotal(), equalTo(55.0)); + assertThat(accumulator.getAvg(), equalTo(5.5)); + } + + public void testGivenNegativeValues() { + StatsAccumulator accumulator = new StatsAccumulator(); + + for (int i = 1; i <= 10; i++) { + accumulator.add(-1 * i); + } + + assertThat(accumulator.getMin(), equalTo(-10.0)); + assertThat(accumulator.getMax(), equalTo(-1.0)); + assertThat(accumulator.getTotal(), equalTo(-55.0)); + assertThat(accumulator.getAvg(), equalTo(-5.5)); + } + + public void testAsMap() { + StatsAccumulator accumulator = new StatsAccumulator(); + accumulator.add(5.0); + accumulator.add(10.0); + + Map expectedMap = new HashMap<>(); + expectedMap.put("min", 5.0); + expectedMap.put("max", 10.0); + expectedMap.put("avg", 7.5); + expectedMap.put("total", 15.0); + assertThat(accumulator.asMap(), equalTo(expectedMap)); + } + + public void testMerge() { + StatsAccumulator accumulator = new StatsAccumulator(); + accumulator.add(5.0); + accumulator.add(10.0); + + assertThat(accumulator.getMin(), equalTo(5.0)); + assertThat(accumulator.getMax(), equalTo(10.0)); + assertThat(accumulator.getTotal(), equalTo(15.0)); + assertThat(accumulator.getAvg(), equalTo(7.5)); + + StatsAccumulator accumulator2 = new StatsAccumulator(); + accumulator2.add(1.0); + accumulator2.add(3.0); + accumulator2.add(7.0); + + assertThat(accumulator2.getMin(), equalTo(1.0)); + 
assertThat(accumulator2.getMax(), equalTo(7.0)); + assertThat(accumulator2.getTotal(), equalTo(11.0)); + assertThat(accumulator2.getAvg(), equalTo(11.0 / 3.0)); + + accumulator.merge(accumulator2); + assertThat(accumulator.getMin(), equalTo(1.0)); + assertThat(accumulator.getMax(), equalTo(10.0)); + assertThat(accumulator.getTotal(), equalTo(26.0)); + assertThat(accumulator.getAvg(), equalTo(5.2)); + + // same as accumulator + StatsAccumulator accumulator3 = new StatsAccumulator(); + accumulator3.add(5.0); + accumulator3.add(10.0); + + // merging the other way should yield the same results + accumulator2.merge(accumulator3); + assertThat(accumulator2.getMin(), equalTo(1.0)); + assertThat(accumulator2.getMax(), equalTo(10.0)); + assertThat(accumulator2.getTotal(), equalTo(26.0)); + assertThat(accumulator2.getAvg(), equalTo(5.2)); + } + + public void testMergeMixedEmpty() { + StatsAccumulator accumulator = new StatsAccumulator(); + + StatsAccumulator accumulator2 = new StatsAccumulator(); + accumulator2.add(1.0); + accumulator2.add(3.0); + accumulator.merge(accumulator2); + assertThat(accumulator.getMin(), equalTo(1.0)); + assertThat(accumulator.getMax(), equalTo(3.0)); + assertThat(accumulator.getTotal(), equalTo(4.0)); + + StatsAccumulator accumulator3 = new StatsAccumulator(); + accumulator.merge(accumulator3); + assertThat(accumulator.getMin(), equalTo(1.0)); + assertThat(accumulator.getMax(), equalTo(3.0)); + assertThat(accumulator.getTotal(), equalTo(4.0)); + + StatsAccumulator accumulator4 = new StatsAccumulator(); + accumulator3.merge(accumulator4); + + assertThat(accumulator3.getMin(), equalTo(0.0)); + assertThat(accumulator3.getMax(), equalTo(0.0)); + assertThat(accumulator3.getTotal(), equalTo(0.0)); + } + + public void testFromStatsAggregation() { + Stats stats = mock(Stats.class); + when(stats.getMax()).thenReturn(25.0); + when(stats.getMin()).thenReturn(2.5); + when(stats.getCount()).thenReturn(4L); + when(stats.getSum()).thenReturn(48.0); + when(stats.getAvg()).thenReturn(12.0); + + StatsAccumulator accumulator = StatsAccumulator.fromStatsAggregation(stats); + assertThat(accumulator.getMin(), equalTo(2.5)); + assertThat(accumulator.getMax(), equalTo(25.0)); + assertThat(accumulator.getTotal(), equalTo(48.0)); + assertThat(accumulator.getAvg(), equalTo(12.0)); + } + + @Override + public StatsAccumulator createTestInstance() { + StatsAccumulator accumulator = new StatsAccumulator(); + for (int i = 0; i < randomInt(10); ++i) { + accumulator.add(randomDoubleBetween(0.0, 1000.0, true)); + } + + return accumulator; + } + + @Override + protected Reader instanceReader() { + return StatsAccumulator::new; + } +} \ No newline at end of file diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 63a5be610433b..0946ad3ac51c0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -194,7 +194,8 @@ public void testReloadingTrustStore() throws Exception { Path trustStorePath = tempDir.resolve("testnode.jks"); Path updatedTruststorePath = tempDir.resolve("testnode_updated.jks"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), trustStorePath); - 
Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"), updatedTruststorePath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"), + updatedTruststorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java index 121c649718559..de3d54f327129 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSet.java @@ -34,7 +34,8 @@ import org.elasticsearch.xpack.ml.job.process.NativeController; import org.elasticsearch.xpack.ml.job.process.NativeControllerHolder; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; -import org.elasticsearch.xpack.ml.utils.StatsAccumulator; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; +import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator; import java.io.IOException; import java.util.Arrays; @@ -194,10 +195,12 @@ public void execute(ActionListener listener) { private void addJobsUsage(GetJobsStatsAction.Response response) { StatsAccumulator allJobsDetectorsStats = new StatsAccumulator(); StatsAccumulator allJobsModelSizeStats = new StatsAccumulator(); + ForecastStats allJobsForecastStats = new ForecastStats(); Map jobCountByState = new HashMap<>(); Map detectorStatsByState = new HashMap<>(); Map modelSizeStatsByState = new HashMap<>(); + Map forecastStatsByState = new HashMap<>(); Map jobs = mlMetadata.getJobs(); List jobsStats = response.getResponse().results(); @@ -208,6 +211,7 @@ private void addJobsUsage(GetJobsStatsAction.Response response) { double modelSize = modelSizeStats == null ? 
0.0 : jobStats.getModelSizeStats().getModelBytes(); + allJobsForecastStats.merge(jobStats.getForecastStats()); allJobsDetectorsStats.add(detectorsCount); allJobsModelSizeStats.add(modelSize); @@ -217,24 +221,28 @@ private void addJobsUsage(GetJobsStatsAction.Response response) { js -> new StatsAccumulator()).add(detectorsCount); modelSizeStatsByState.computeIfAbsent(jobState, js -> new StatsAccumulator()).add(modelSize); + forecastStatsByState.merge(jobState, jobStats.getForecastStats(), (f1, f2) -> f1.merge(f2)); } jobsUsage.put(MachineLearningFeatureSetUsage.ALL, createJobUsageEntry(jobs.size(), allJobsDetectorsStats, - allJobsModelSizeStats)); + allJobsModelSizeStats, allJobsForecastStats)); for (JobState jobState : jobCountByState.keySet()) { jobsUsage.put(jobState.name().toLowerCase(Locale.ROOT), createJobUsageEntry( jobCountByState.get(jobState).get(), detectorStatsByState.get(jobState), - modelSizeStatsByState.get(jobState))); + modelSizeStatsByState.get(jobState), + forecastStatsByState.get(jobState))); } } private Map createJobUsageEntry(long count, StatsAccumulator detectorStats, - StatsAccumulator modelSizeStats) { + StatsAccumulator modelSizeStats, + ForecastStats forecastStats) { Map usage = new HashMap<>(); usage.put(MachineLearningFeatureSetUsage.COUNT, count); usage.put(MachineLearningFeatureSetUsage.DETECTORS, detectorStats.asMap()); usage.put(MachineLearningFeatureSetUsage.MODEL_SIZE, modelSizeStats.asMap()); + usage.put(MachineLearningFeatureSetUsage.FORECASTS, forecastStats.asMap()); return usage; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index 78bfe2c7bc6b0..fb40f3aee2f8c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -108,9 +109,12 @@ protected void taskOperation(GetJobsStatsAction.Request request, TransportOpenJo JobState jobState = MlMetadata.getJobState(jobId, tasks); String assignmentExplanation = pTask.getAssignment().getExplanation(); TimeValue openTime = durationToTimeValue(processManager.jobOpenTime(task)); - GetJobsStatsAction.Response.JobStats jobStats = new GetJobsStatsAction.Response.JobStats(jobId, stats.get().v1(), - stats.get().v2(), jobState, node, assignmentExplanation, openTime); - listener.onResponse(new QueryPage<>(Collections.singletonList(jobStats), 1, Job.RESULTS_FIELD)); + gatherForecastStats(jobId, forecastStats -> { + GetJobsStatsAction.Response.JobStats jobStats = new GetJobsStatsAction.Response.JobStats(jobId, stats.get().v1(), + stats.get().v2(), forecastStats, jobState, node, assignmentExplanation, openTime); + listener.onResponse(new QueryPage<>(Collections.singletonList(jobStats), 1, Job.RESULTS_FIELD)); + }, listener::onFailure); + } else { listener.onResponse(new 
QueryPage<>(Collections.emptyList(), 0, Job.RESULTS_FIELD)); } @@ -133,25 +137,31 @@ void gatherStatsForClosedJobs(MlMetadata mlMetadata, GetJobsStatsAction.Request for (int i = 0; i < jobIds.size(); i++) { int slot = i; String jobId = jobIds.get(i); - gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> { - JobState jobState = MlMetadata.getJobState(jobId, tasks); - PersistentTasksCustomMetaData.PersistentTask<?> pTask = MlMetadata.getJobTask(jobId, tasks); - String assignmentExplanation = null; - if (pTask != null) { - assignmentExplanation = pTask.getAssignment().getExplanation(); - } - jobStats.set(slot, new GetJobsStatsAction.Response.JobStats(jobId, dataCounts, modelSizeStats, jobState, null, - assignmentExplanation, null)); - if (counter.decrementAndGet() == 0) { - List<GetJobsStatsAction.Response.JobStats> results = response.getResponse().results(); - results.addAll(jobStats.asList()); - listener.onResponse(new GetJobsStatsAction.Response(response.getTaskFailures(), response.getNodeFailures(), - new QueryPage<>(results, results.size(), Job.RESULTS_FIELD))); - } + gatherForecastStats(jobId, forecastStats -> { + gatherDataCountsAndModelSizeStats(jobId, (dataCounts, modelSizeStats) -> { + JobState jobState = MlMetadata.getJobState(jobId, tasks); + PersistentTasksCustomMetaData.PersistentTask<?> pTask = MlMetadata.getJobTask(jobId, tasks); + String assignmentExplanation = null; + if (pTask != null) { + assignmentExplanation = pTask.getAssignment().getExplanation(); + } + jobStats.set(slot, new GetJobsStatsAction.Response.JobStats(jobId, dataCounts, modelSizeStats, forecastStats, jobState, + null, assignmentExplanation, null)); + if (counter.decrementAndGet() == 0) { + List<GetJobsStatsAction.Response.JobStats> results = response.getResponse().results(); + results.addAll(jobStats.asList()); + listener.onResponse(new GetJobsStatsAction.Response(response.getTaskFailures(), response.getNodeFailures(), + new QueryPage<>(results, results.size(), Job.RESULTS_FIELD))); + } + }, listener::onFailure); }, listener::onFailure); } } + void gatherForecastStats(String jobId, Consumer<ForecastStats> handler, Consumer<Exception> errorHandler) { + jobProvider.getForecastStats(jobId, handler, errorHandler); + } + void gatherDataCountsAndModelSizeStats(String jobId, BiConsumer<DataCounts, ModelSizeStats> handler, Consumer<Exception> errorHandler) { jobProvider.dataCounts(jobId, dataCounts -> { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 578ddd1efc78a..7513cb5a5bbc0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -63,6 +63,9 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -93,6 +96,9 @@ import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.ModelPlot; import org.elasticsearch.xpack.core.ml.job.results.Result; +import 
org.elasticsearch.xpack.core.ml.stats.CountAccumulator; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; +import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import org.elasticsearch.xpack.core.security.support.Exceptions; @@ -1112,6 +1118,53 @@ public void getForecastRequestStats(String jobId, String forecastId, Consumer handler.accept(result.result), errorHandler, () -> null); } + public void getForecastStats(String jobId, Consumer<ForecastStats> handler, Consumer<Exception> errorHandler) { + String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); + + QueryBuilder termQuery = new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), ForecastRequestStats.RESULT_TYPE_VALUE); + QueryBuilder jobQuery = new TermsQueryBuilder(Job.ID.getPreferredName(), jobId); + QueryBuilder finalQuery = new BoolQueryBuilder().filter(termQuery).filter(jobQuery); + + SearchRequest searchRequest = new SearchRequest(indexName); + searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(searchRequest.indicesOptions())); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.query(finalQuery); + // one stats aggregation per numeric forecast property, plus a terms aggregation bucketing forecasts by status + sourceBuilder.aggregation( + AggregationBuilders.stats(ForecastStats.Fields.MEMORY).field(ForecastRequestStats.MEMORY_USAGE.getPreferredName())); + sourceBuilder.aggregation(AggregationBuilders.stats(ForecastStats.Fields.RECORDS) + .field(ForecastRequestStats.PROCESSED_RECORD_COUNT.getPreferredName())); + sourceBuilder.aggregation( + AggregationBuilders.stats(ForecastStats.Fields.RUNTIME).field(ForecastRequestStats.PROCESSING_TIME_MS.getPreferredName())); + sourceBuilder.aggregation( + AggregationBuilders.terms(ForecastStats.Fields.STATUSES).field(ForecastRequestStats.STATUS.getPreferredName())); + sourceBuilder.size(0); // only the aggregations are needed, not the matching documents + + searchRequest.source(sourceBuilder); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, + ActionListener.wrap(searchResponse -> { + long totalHits = searchResponse.getHits().getTotalHits(); + Aggregations aggregations = searchResponse.getAggregations(); + if (totalHits == 0 || aggregations == null) { + handler.accept(new ForecastStats()); + return; + } + Map<String, Aggregation> aggregationsAsMap = aggregations.asMap(); + StatsAccumulator memoryStats = StatsAccumulator + .fromStatsAggregation((Stats) aggregationsAsMap.get(ForecastStats.Fields.MEMORY)); + StatsAccumulator recordStats = StatsAccumulator + .fromStatsAggregation((Stats) aggregationsAsMap.get(ForecastStats.Fields.RECORDS)); + StatsAccumulator runtimeStats = StatsAccumulator + .fromStatsAggregation((Stats) aggregationsAsMap.get(ForecastStats.Fields.RUNTIME)); + CountAccumulator statusCount = CountAccumulator + .fromTermsAggregation((StringTerms) aggregationsAsMap.get(ForecastStats.Fields.STATUSES)); + + ForecastStats forecastStats = new ForecastStats(totalHits, memoryStats, recordStats, runtimeStats, statusCount); + handler.accept(forecastStats); + }, errorHandler), client::search); + + } + public void updateCalendar(String calendarId, Set<String> jobIdsToAdd, Set<String> jobIdsToRemove, Consumer<Calendar> handler, Consumer<Exception> errorHandler) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/StatsAccumulator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/StatsAccumulator.java deleted file mode 100644 index 1f1df147d80a1..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/StatsAccumulator.java +++ 
/dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.utils; - -import java.util.HashMap; -import java.util.Map; - -/** - * Helper class to collect min, max, avg and total statistics for a quantity - */ -public class StatsAccumulator { - - private static final String MIN = "min"; - private static final String MAX = "max"; - private static final String AVG = "avg"; - private static final String TOTAL = "total"; - - private long count; - private double total; - private Double min; - private Double max; - - public void add(double value) { - count++; - total += value; - min = min == null ? value : (value < min ? value : min); - max = max == null ? value : (value > max ? value : max); - } - - public double getMin() { - return min == null ? 0.0 : min; - } - - public double getMax() { - return max == null ? 0.0 : max; - } - - public double getAvg() { - return count == 0.0 ? 0.0 : total/count; - } - - public double getTotal() { - return total; - } - - public Map asMap() { - Map map = new HashMap<>(); - map.put(MIN, getMin()); - map.put(MAX, getMax()); - map.put(AVG, getAvg()); - map.put(TOTAL, getTotal()); - return map; - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java index eba2054054c0d..5893a863fe38f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningFeatureSetTests.java @@ -39,6 +39,8 @@ import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStatsTests; import org.elasticsearch.xpack.core.watcher.support.xcontent.XContentSource; import org.junit.Before; @@ -138,11 +140,11 @@ public void testUsage() throws Exception { settings.put("xpack.ml.enabled", true); Job opened1 = buildJob("opened1", Arrays.asList(buildMinDetector("foo"))); - GetJobsStatsAction.Response.JobStats opened1JobStats = buildJobStats("opened1", JobState.OPENED, 100L); + GetJobsStatsAction.Response.JobStats opened1JobStats = buildJobStats("opened1", JobState.OPENED, 100L, 3L); Job opened2 = buildJob("opened2", Arrays.asList(buildMinDetector("foo"), buildMinDetector("bar"))); - GetJobsStatsAction.Response.JobStats opened2JobStats = buildJobStats("opened2", JobState.OPENED, 200L); + GetJobsStatsAction.Response.JobStats opened2JobStats = buildJobStats("opened2", JobState.OPENED, 200L, 8L); Job closed1 = buildJob("closed1", Arrays.asList(buildMinDetector("foo"), buildMinDetector("bar"), buildMinDetector("foobar"))); - GetJobsStatsAction.Response.JobStats closed1JobStats = buildJobStats("closed1", JobState.CLOSED, 300L); + GetJobsStatsAction.Response.JobStats closed1JobStats = buildJobStats("closed1", JobState.CLOSED, 300L, 0); givenJobs(Arrays.asList(opened1, opened2, closed1), Arrays.asList(opened1JobStats, opened2JobStats, closed1JobStats)); @@ -210,6 +212,15 @@ public void testUsage() throws Exception { 
assertThat(source.getValue("datafeeds._all.count"), equalTo(3)); assertThat(source.getValue("datafeeds.started.count"), equalTo(2)); assertThat(source.getValue("datafeeds.stopped.count"), equalTo(1)); + + assertThat(source.getValue("jobs._all.forecasts.total"), equalTo(11)); + assertThat(source.getValue("jobs._all.forecasts.forecasted_jobs"), equalTo(2)); + + assertThat(source.getValue("jobs.closed.forecasts.total"), equalTo(0)); + assertThat(source.getValue("jobs.closed.forecasts.forecasted_jobs"), equalTo(0)); + + assertThat(source.getValue("jobs.opened.forecasts.total"), equalTo(11)); + assertThat(source.getValue("jobs.opened.forecasts.forecasted_jobs"), equalTo(2)); } } @@ -301,12 +312,16 @@ private static Job buildJob(String jobId, List detectors) { .build(new Date(randomNonNegativeLong())); } - private static GetJobsStatsAction.Response.JobStats buildJobStats(String jobId, JobState state, long modelBytes) { + private static GetJobsStatsAction.Response.JobStats buildJobStats(String jobId, JobState state, long modelBytes, + long numberOfForecasts) { ModelSizeStats.Builder modelSizeStats = new ModelSizeStats.Builder(jobId); modelSizeStats.setModelBytes(modelBytes); GetJobsStatsAction.Response.JobStats jobStats = mock(GetJobsStatsAction.Response.JobStats.class); + ForecastStats forecastStats = buildForecastStats(numberOfForecasts); + when(jobStats.getJobId()).thenReturn(jobId); when(jobStats.getModelSizeStats()).thenReturn(modelSizeStats.build()); + when(jobStats.getForecastStats()).thenReturn(forecastStats); when(jobStats.getState()).thenReturn(state); return jobStats; } @@ -316,4 +331,8 @@ private static GetDatafeedsStatsAction.Response.DatafeedStats buildDatafeedStats when(stats.getDatafeedState()).thenReturn(state); return stats; } + + private static ForecastStats buildForecastStats(long numberOfForecasts) { + return new ForecastStatsTests().createForecastStats(numberOfForecasts, numberOfForecasts); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java index 40bc82c6048c7..2e00ad71251db 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsActionTests.java @@ -37,7 +37,7 @@ public void testDetermineJobIds() { result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Collections.singletonList("id1"), Collections.singletonList( - new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, JobState.OPENED, null, null, null))); + new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, null, JobState.OPENED, null, null, null))); assertEquals(0, result.size()); result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, @@ -49,7 +49,7 @@ public void testDetermineJobIds() { result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Arrays.asList("id1", "id2", "id3"), - Collections.singletonList(new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, + Collections.singletonList(new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, null, JobState.CLOSED, null, null, null)) ); assertEquals(2, result.size()); @@ -58,17 +58,16 @@ public void testDetermineJobIds() { result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Arrays.asList("id1", "id2", "id3"), Arrays.asList( - new 
GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, JobState.OPENED, null, null, null), - new GetJobsStatsAction.Response.JobStats("id3", new DataCounts("id3"), null, JobState.OPENED, null, null, null) + new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, null, JobState.OPENED, null, null, null), + new GetJobsStatsAction.Response.JobStats("id3", new DataCounts("id3"), null, null, JobState.OPENED, null, null, null) )); assertEquals(1, result.size()); assertEquals("id2", result.get(0)); - result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Arrays.asList("id1", "id2", "id3"), - Arrays.asList( - new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, JobState.OPENED, null, null, null), - new GetJobsStatsAction.Response.JobStats("id2", new DataCounts("id2"), null, JobState.OPENED, null, null, null), - new GetJobsStatsAction.Response.JobStats("id3", new DataCounts("id3"), null, JobState.OPENED, null, null, null))); + result = determineNonDeletedJobIdsWithoutLiveStats(mlMetadata, Arrays.asList("id1", "id2", "id3"), Arrays.asList( + new GetJobsStatsAction.Response.JobStats("id1", new DataCounts("id1"), null, null, JobState.OPENED, null, null, null), + new GetJobsStatsAction.Response.JobStats("id2", new DataCounts("id2"), null, null, JobState.OPENED, null, null, null), + new GetJobsStatsAction.Response.JobStats("id3", new DataCounts("id3"), null, null, JobState.OPENED, null, null, null))); assertEquals(0, result.size()); // No jobs running, but job 4 is being deleted diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/StatsAccumulatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/StatsAccumulatorTests.java deleted file mode 100644 index ae9b6a7360c13..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/StatsAccumulatorTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.utils; - -import org.elasticsearch.test.ESTestCase; - -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; - -public class StatsAccumulatorTests extends ESTestCase { - - public void testGivenNoValues() { - StatsAccumulator accumulator = new StatsAccumulator(); - assertThat(accumulator.getMin(), equalTo(0.0)); - assertThat(accumulator.getMax(), equalTo(0.0)); - assertThat(accumulator.getTotal(), equalTo(0.0)); - assertThat(accumulator.getAvg(), equalTo(0.0)); - } - - public void testGivenPositiveValues() { - StatsAccumulator accumulator = new StatsAccumulator(); - - for (int i = 1; i <= 10; i++) { - accumulator.add(i); - } - - assertThat(accumulator.getMin(), equalTo(1.0)); - assertThat(accumulator.getMax(), equalTo(10.0)); - assertThat(accumulator.getTotal(), equalTo(55.0)); - assertThat(accumulator.getAvg(), equalTo(5.5)); - } - - public void testGivenNegativeValues() { - StatsAccumulator accumulator = new StatsAccumulator(); - - for (int i = 1; i <= 10; i++) { - accumulator.add(-1 * i); - } - - assertThat(accumulator.getMin(), equalTo(-10.0)); - assertThat(accumulator.getMax(), equalTo(-1.0)); - assertThat(accumulator.getTotal(), equalTo(-55.0)); - assertThat(accumulator.getAvg(), equalTo(-5.5)); - } - - public void testAsMap() { - StatsAccumulator accumulator = new StatsAccumulator(); - accumulator.add(5.0); - accumulator.add(10.0); - - Map expectedMap = new HashMap<>(); - expectedMap.put("min", 5.0); - expectedMap.put("max", 10.0); - expectedMap.put("avg", 7.5); - expectedMap.put("total", 15.0); - assertThat(accumulator.asMap(), equalTo(expectedMap)); - } -} \ No newline at end of file diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsMonitoringDocTests.java index 88f34c4577c1c..9d37073a426cc 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsMonitoringDocTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; @@ -100,7 +101,9 @@ public void testToXContent() throws IOException { .build(); final DataCounts dataCounts = new DataCounts("_job_id", 0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, date3, date4, date5, date6, date7); - final JobStats jobStats = new JobStats("_job", dataCounts, modelStats, JobState.OPENED, discoveryNode, "_explanation", time); + final ForecastStats forecastStats = new ForecastStats(); + final JobStats jobStats = new JobStats("_job", dataCounts, modelStats, forecastStats, JobState.OPENED, discoveryNode, + "_explanation", time); final MonitoringDoc.Node node = new MonitoringDoc.Node("_uuid", "_host", "_addr", "_ip", "_name", 1504169190855L); final JobStatsMonitoringDoc document = new JobStatsMonitoringDoc("_cluster", 1502266739402L, 1506593717631L, node, jobStats); @@ 
-152,6 +155,9 @@ public void testToXContent() throws IOException { + "\"log_time\":1483315322002," + "\"timestamp\":1483228861001" + "}," + + "\"forecasts_stats\":{" + + "\"total\":0,\"forecasted_jobs\":0" + + "}," + "\"state\":\"opened\"," + "\"node\":{" + "\"id\":\"_node_id\"," diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index ca8d966a031cf..9d27c2030d676 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -6,7 +6,7 @@ buildscript { } } dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2' + classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' } } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java index 351ac73a88f28..201ae251ca0df 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java @@ -344,7 +344,7 @@ public T getObject(int columnIndex, Class type) throws SQLException { throw new SQLException("type is null"); } - return getObject(columnIndex, type); + return convert(columnIndex, type); } private T convert(int columnIndex, Class type) throws SQLException { diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java index 1e24a03c8b31c..aa9d434f332e3 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java @@ -61,9 +61,9 @@ private TypeConverter() { static { Map, JDBCType> aMap = Arrays.stream(DataType.values()) - .filter(dataType -> dataType.javaClass() != null - && dataType != DataType.HALF_FLOAT - && dataType != DataType.SCALED_FLOAT + .filter(dataType -> dataType.javaClass() != null + && dataType != DataType.HALF_FLOAT + && dataType != DataType.SCALED_FLOAT && dataType != DataType.TEXT) .collect(Collectors.toMap(dataType -> dataType.javaClass(), dataType -> dataType.jdbcType)); // apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP} @@ -254,7 +254,7 @@ static Object convert(Object v, JDBCType columnType) throws SQLException { case REAL: return floatValue(v); // Float might be represented as string for infinity and NaN values case TIMESTAMP: - return ((Number) v).longValue(); + return new Timestamp(((Number) v).longValue()); default: throw new SQLException("Unexpected column type [" + columnType.getName() + "]"); @@ -428,7 +428,7 @@ private static Float asFloat(Object val, JDBCType columnType) throws SQLExceptio case SMALLINT: case INTEGER: case BIGINT: - return Float.valueOf((float) ((Number) val).longValue()); + return Float.valueOf(((Number) val).longValue()); case REAL: case FLOAT: case DOUBLE: @@ -447,7 +447,7 @@ private static Double asDouble(Object val, JDBCType columnType) throws SQLExcept case SMALLINT: case INTEGER: case BIGINT: - return Double.valueOf((double) ((Number) val).longValue()); + return Double.valueOf(((Number) val).longValue()); case REAL: case FLOAT: case DOUBLE: diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java 
b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java index 0182ea63f637d..51c130a39118e 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverterTests.java @@ -14,6 +14,7 @@ import org.joda.time.ReadableDateTime; import java.sql.JDBCType; +import java.sql.Timestamp; import static org.hamcrest.Matchers.instanceOf; @@ -41,8 +42,8 @@ public void testDoubleAsNative() throws Exception { public void testTimestampAsNative() throws Exception { DateTime now = DateTime.now(); - assertThat(convertAsNative(now, JDBCType.TIMESTAMP), instanceOf(Long.class)); - assertEquals(now.getMillis(), convertAsNative(now, JDBCType.TIMESTAMP)); + assertThat(convertAsNative(now, JDBCType.TIMESTAMP), instanceOf(Timestamp.class)); + assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, JDBCType.TIMESTAMP)).getTime()); } private Object convertAsNative(Object value, JDBCType type) throws Exception { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml index d787e07b8c28c..6e9579a061339 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml @@ -109,6 +109,18 @@ setup: filter_id: "filter-foo" from: 0 size: 1 + +--- +"Test create filter given invalid filter_id": + - do: + catch: bad_request + xpack.ml.put_filter: + filter_id: Invalid + body: > + { + "description": "this id is invalid due to an upper case character" + } + --- "Test create filter api": - do: diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/30_with_chain_input.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/30_with_chain_input.yml new file mode 100644 index 0000000000000..81a12fe6f7ddb --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/30_with_chain_input.yml @@ -0,0 +1,51 @@ +--- +"Test get watch api with chained input and basic auth": + - do: + cluster.health: + wait_for_status: yellow + + - do: + xpack.watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "cron": "0 0 0 1 * ? 
2099" + } + }, + "input": { + "chain": { + "inputs": [ + { + "http": { + "http": { + "request": { + "url" : "http://localhost/", + "auth": { + "basic": { + "username": "Username123", + "password": "Password123" + } + } + } + } + } + } + ] + } + }, + "actions": { + "logging": { + "logging": { + "text": "logging statement here" + } + } + } + } + + - do: + xpack.watcher.get_watch: + id: "my_watch" + - match: { found : true} + - match: { _id: "my_watch" } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml index 554e339687ba4..9844dea9135a3 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/stats/10_basic.yml @@ -1,17 +1,11 @@ --- "Test watcher stats output": - - skip: - version: "all" - reason: "@AwaitsFix: https://github.com/elastic/elasticsearch/issues/30298" - do: {xpack.watcher.stats: {}} - match: { "manually_stopped": false } - match: { "stats.0.watcher_state": "started" } --- "Test watcher stats supports emit_stacktraces parameter": - - skip: - version: "all" - reason: "@AwaitsFix: https://github.com/elastic/elasticsearch/issues/30298" - do: xpack.watcher.stats: metric: "all" diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInput.java index 3c62f4d1066d2..1599531429bf5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/chain/ChainInput.java @@ -41,7 +41,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startArray(INPUTS.getPreferredName()); for (Tuple tuple : inputs) { builder.startObject().startObject(tuple.v1()); - builder.field(tuple.v2().type(), tuple.v2()); + builder.field(tuple.v2().type(), tuple.v2(), params); builder.endObject().endObject(); } builder.endArray(); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java index 9870bcd086534..88399d3cb93d8 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java @@ -7,11 +7,14 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.BiFunction; @@ -25,7 +28,14 @@ public abstract class NotificationService extends AbstractComponent { private Map accounts; private Account defaultAccount; - public NotificationService(Settings settings, String type) { + public NotificationService(Settings settings, String type, + ClusterSettings clusterSettings, List> pluginSettings) { + this(settings, type); + 
clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, pluginSettings); + } + + // Used for testing only + NotificationService(Settings settings, String type) { super(settings); this.type = type; } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java index 41a2ecc3bcc80..e0687ee5d6316 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java @@ -94,9 +94,8 @@ public class EmailService extends NotificationService { private final CryptoService cryptoService; public EmailService(Settings settings, @Nullable CryptoService cryptoService, ClusterSettings clusterSettings) { - super(settings, "email"); + super(settings, "email", clusterSettings, EmailService.getSettings()); this.cryptoService = cryptoService; - clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings()); // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_PROFILE, (s, o) -> {}, (s, o) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java index ae6bd05285bf1..b7473b10afeb7 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java @@ -64,9 +64,8 @@ public class HipChatService extends NotificationService { private HipChatServer defaultServer; public HipChatService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) { - super(settings, "hipchat"); + super(settings, "hipchat", clusterSettings, HipChatService.getSettings()); this.httpClient = httpClient; - clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings()); // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_HOST, (s) -> {}); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java index 84dbccd18d402..b06baba23367c 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java @@ -60,9 +60,8 @@ public class JiraService extends NotificationService { private final HttpClient httpClient; public JiraService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) { - super(settings, "jira"); + super(settings, "jira", clusterSettings, JiraService.getSettings()); this.httpClient = httpClient; - clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings()); // ensure logging of setting changes clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {}); 
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
index 41a2ecc3bcc80..e0687ee5d6316 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
@@ -94,9 +94,8 @@ public class EmailService extends NotificationService<Account> {
     private final CryptoService cryptoService;

     public EmailService(Settings settings, @Nullable CryptoService cryptoService, ClusterSettings clusterSettings) {
-        super(settings, "email");
+        super(settings, "email", clusterSettings, EmailService.getSettings());
         this.cryptoService = cryptoService;
-        clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings());
         // ensure logging of setting changes
         clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_PROFILE, (s, o) -> {}, (s, o) -> {});
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
index ae6bd05285bf1..b7473b10afeb7 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
@@ -64,9 +64,8 @@ public class HipChatService extends NotificationService<HipChatAccount> {
     private HipChatServer defaultServer;

     public HipChatService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) {
-        super(settings, "hipchat");
+        super(settings, "hipchat", clusterSettings, HipChatService.getSettings());
         this.httpClient = httpClient;
-        clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings());
         // ensure logging of setting changes
         clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
         clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_HOST, (s) -> {});
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
index 84dbccd18d402..b06baba23367c 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
@@ -60,9 +60,8 @@ public class JiraService extends NotificationService<JiraAccount> {
     private final HttpClient httpClient;

     public JiraService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) {
-        super(settings, "jira");
+        super(settings, "jira", clusterSettings, JiraService.getSettings());
         this.httpClient = httpClient;
-        clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings());
         // ensure logging of setting changes
         clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_ALLOW_HTTP, (s, o) -> {}, (s, o) -> {});
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
index 92016c59db598..cb35fdcf27dce 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
@@ -39,7 +39,7 @@ public class PagerDutyService extends NotificationService<PagerDutyAccount> {
     private final HttpClient httpClient;

     public PagerDutyService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) {
-        super(settings, "pagerduty");
+        super(settings, "pagerduty", clusterSettings, PagerDutyService.getSettings());
         this.httpClient = httpClient;
         clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {});
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
index 777a985dd6ae9..00b5fafbe099c 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
@@ -39,9 +39,8 @@ public class SlackService extends NotificationService<SlackAccount> {
     private final HttpClient httpClient;

     public SlackService(Settings settings, HttpClient httpClient, ClusterSettings clusterSettings) {
-        super(settings, "slack");
+        super(settings, "slack", clusterSettings, SlackService.getSettings());
         this.httpClient = httpClient;
-        clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, getSettings());
         clusterSettings.addSettingsUpdateConsumer(SETTING_DEFAULT_ACCOUNT, (s) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_URL, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_URL_SECURE, (s, o) -> {}, (s, o) -> {});
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java
index e654452779ab8..cc19cef7b4768 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/input/chain/ChainInputTests.java
@@ -9,6 +9,7 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -18,6 +19,7 @@
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.SecuritySettingsSourceField;
 import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext;
+import org.elasticsearch.xpack.core.watcher.input.Input;
 import org.elasticsearch.xpack.core.watcher.watch.Payload;
 import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
 import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth;
@@ -29,6 +31,7 @@
 import org.elasticsearch.xpack.watcher.input.simple.SimpleInputFactory;
 import org.elasticsearch.xpack.watcher.test.WatcherTestUtils;

+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;

@@ -46,6 +49,7 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.sameInstance;

 public class ChainInputTests extends ESTestCase {

@@ -220,4 +224,24 @@ public void testParsingShouldBeStrictWhenStartingInputs() throws Exception {
         expectThrows(ElasticsearchParseException.class, () -> chainInputFactory.parseInput("test", parser));
         assertThat(e.getMessage(), containsString("Expected starting JSON object after [first] in watch [test]"));
     }
+
+    public void testThatXContentParametersArePassedToInputs() throws Exception {
+        ToXContent.Params randomParams = new ToXContent.MapParams(Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)));
+        ChainInput chainInput = new ChainInput(Collections.singletonList(Tuple.tuple("whatever", new Input() {
+            @Override
+            public String type() {
+                return "test";
+            }
+
+            @Override
+            public XContentBuilder toXContent(XContentBuilder builder, Params params) {
+                assertThat(params, sameInstance(randomParams));
+                return builder;
+            }
+        })));
+
+        try (XContentBuilder builder = jsonBuilder()) {
+            chainInput.toXContent(builder, randomParams);
+        }
+    }
 }
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
similarity index 98%
rename from x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/notification/NotificationServiceTests.java
rename to x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
index bb5f234ca950a..829337e9acb7a 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/notification/NotificationServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.notification;
+package org.elasticsearch.xpack.watcher.notification;

 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsException;
@@ -90,4 +90,4 @@ protected String createAccount(String name, Settings accountSettings) {
         return name;
     }
 }
-}
\ No newline at end of file
+}
diff --git a/x-pack/qa/smoke-test-ml-with-security/build.gradle b/x-pack/qa/smoke-test-ml-with-security/build.gradle
index 58e5eca3600f6..2a12aa2f28d3f 100644
--- a/x-pack/qa/smoke-test-ml-with-security/build.gradle
+++ b/x-pack/qa/smoke-test-ml-with-security/build.gradle
@@ -39,6 +39,7 @@ integTestRunner {
     'ml/delete_model_snapshot/Test delete snapshot missing job_id',
     'ml/delete_model_snapshot/Test delete with in-use model',
     'ml/filter_crud/Test create filter api with mismatching body ID',
+    'ml/filter_crud/Test create filter given invalid filter_id',
     'ml/filter_crud/Test get filter API with bad ID',
     'ml/filter_crud/Test invalid param combinations',
     'ml/filter_crud/Test non-existing filter',
diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java
index 6c999ca2a7291..c427d8bf32c86 100644
--- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java
+++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java
@@ -5,6 +5,8 @@
  */
 package org.elasticsearch.smoketest;

+import io.netty.util.ThreadDeathWatcher;
+import io.netty.util.concurrent.GlobalEventExecutor;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
 import org.elasticsearch.common.network.NetworkAddress;
@@ -19,12 +21,15 @@
 import org.elasticsearch.xpack.core.security.SecurityField;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.rules.ExternalResource;

 import java.net.InetSocketAddress;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
+import java.util.concurrent.TimeUnit;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
@@ -42,6 +47,36 @@
  * indexed in the cluster.
  */
 public class SmokeTestMonitoringWithSecurityIT extends ESIntegTestCase {
+
+    /**
+     * A JUnit class level rule that runs after the AfterClass method in {@link ESIntegTestCase},
+     * which stops the cluster. After the cluster is stopped, there are a few netty threads that
+     * can linger, so we wait for them to finish; otherwise these lingering threads can intermittently
+     * trigger the thread leak detector.
+     */
+    @ClassRule
+    public static final ExternalResource STOP_NETTY_RESOURCE = new ExternalResource() {
+        @Override
+        protected void after() {
+            try {
+                GlobalEventExecutor.INSTANCE.awaitInactivity(5, TimeUnit.SECONDS);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+            } catch (IllegalStateException e) {
+                if (e.getMessage().equals("thread was not started") == false) {
+                    throw e;
+                }
+                // ignore since the thread was never started
+            }
+
+            try {
+                ThreadDeathWatcher.awaitInactivity(5, TimeUnit.SECONDS);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+            }
+        }
+    };
+
     private static final String USER = "test_user";
     private static final String PASS = "x-pack-test-password";
     private static final String MONITORING_PATTERN = ".monitoring-*";
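The rule above works because JUnit wraps @ClassRule statements around the entire class lifecycle, including @BeforeClass and @AfterClass, so ExternalResource.after() is guaranteed to run once ESIntegTestCase has torn the cluster down and the lingering netty threads are the only thing left to wait for. A self-contained sketch of that ordering guarantee (plain JUnit 4, hypothetical class):

    import org.junit.AfterClass;
    import org.junit.ClassRule;
    import org.junit.Test;
    import org.junit.rules.ExternalResource;

    // Prints 1, 2, 3: the class rule's after() runs only once @AfterClass is done,
    // which is exactly the window the netty wait needs.
    public class RuleOrderingSketch {
        @ClassRule
        public static final ExternalResource AFTER_EVERYTHING = new ExternalResource() {
            @Override
            protected void after() {
                System.out.println("3: rule after() - safe to wait for lingering threads");
            }
        };

        @AfterClass
        public static void stopCluster() {
            System.out.println("2: @AfterClass - cluster stopped");
        }

        @Test
        public void test() {
            System.out.println("1: test body");
        }
    }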
diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle
index 4d061fffa9110..a56325dff2c35 100644
--- a/x-pack/qa/sql/build.gradle
+++ b/x-pack/qa/sql/build.gradle
@@ -10,6 +10,8 @@ dependencies {
   // JDBC testing dependencies
   compile project(path: xpackModule('sql:jdbc'), configuration: 'nodeps')
+
+  compile project(path: xpackModule('sql:sql-action'))
   compile "net.sourceforge.csvjdbc:csvjdbc:1.0.34"

   // CLI testing dependencies
@@ -76,6 +78,7 @@ thirdPartyAudit.excludes = [
 subprojects {
   apply plugin: 'elasticsearch.standalone-rest-test'
   dependencies {
+
     /* Since we're a standalone rest test we actually get transitive
      * dependencies but we don't really want them because they cause
      * all kinds of trouble with the jar hell checks. So we suppress
diff --git a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpectIT.java b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpectIT.java
new file mode 100644
index 0000000000000..24e8c170cc39d
--- /dev/null
+++ b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpectIT.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.qa.sql.nosecurity;
+
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.CsvTestCase;
+import org.elasticsearch.xpack.qa.sql.jdbc.DataLoader;
+import org.elasticsearch.xpack.qa.sql.jdbc.JdbcAssert;
+import org.elasticsearch.xpack.qa.sql.jdbc.SpecBaseIntegrationTestCase;
+import org.elasticsearch.xpack.qa.sql.jdbc.SqlSpecTestCase;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.List;
+
+import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.csvConnection;
+import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.executeCsvQuery;
+import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.specParser;
+
+/**
+ * CSV test specification for DOC examples.
+ * While we could use the existing tests, their purpose is to test corner-cases which
+ * get reflected in the dataset structure.
+ * The doc tests, while redundant, try to be expressive first and foremost, and sometimes
+ * the dataset isn't exactly convenient.
+ *
+ * Also, looking around for the tests across the test files isn't trivial.
+ *
+ * That's not to say the two cannot be merged; however, that felt like too much of an effort
+ * at this stage and, to keep things moving, we started with this approach.
+ */
+public class JdbcDocCsvSpectIT extends SpecBaseIntegrationTestCase {
+
+    private final CsvTestCase testCase;
+
+    @Override
+    protected String indexName() {
+        return "library";
+    }
+
+    @Override
+    protected void loadDataset(RestClient client) throws Exception {
+        DataLoader.loadDocsDatasetIntoEs(client);
+    }
+
+    @ParametersFactory(shuffle = false, argumentFormatting = SqlSpecTestCase.PARAM_FORMATTING)
+    public static List<Object[]> readScriptSpec() throws Exception {
+        Parser parser = specParser();
+        return readScriptSpec("/docs.csv-spec", parser);
+    }
+
+    public JdbcDocCsvSpectIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) {
+        super(fileName, groupName, testName, lineNumber);
+        this.testCase = testCase;
+    }
+
+    @Override
+    protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLException {
+        Logger log = logEsResultSet() ? logger : null;
+
+        //
+        // uncomment this to print out the result set and create new CSV tests
+        //
+        //JdbcTestUtils.logLikeCLI(elastic, log);
+        JdbcAssert.assertResultSets(expected, elastic, log, true);
+    }
+
+    @Override
+    protected boolean logEsResultSet() {
+        return true;
+    }
+
+    @Override
+    protected final void doTest() throws Throwable {
+        try (Connection csv = csvConnection(testCase.expectedResults); Connection es = esJdbc()) {
+
+            // pass the testName as table for debugging purposes (in case the underlying reader is missing)
+            ResultSet expected = executeCsvQuery(csv, testName);
+            ResultSet elasticResults = executeJdbcQuery(es, testCase.query);
+            assertResults(expected, elasticResults);
+        }
+    }
+}
\ No newline at end of file
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java
index e37688eb90465..99e8432370471 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java
@@ -6,14 +6,13 @@
 package org.elasticsearch.xpack.qa.sql.jdbc;

 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
 import org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.CsvTestCase;
-import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;

 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Properties;

 import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.csvConnection;
 import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.executeCsvQuery;
@@ -57,13 +56,4 @@ protected final void doTest() throws Throwable {
             assertResults(expected, elasticResults);
         }
     }
-
-    // make sure ES uses UTC (otherwise JDBC driver picks up the JVM timezone per spec/convention)
-    @Override
-    protected Properties connectionProperties() {
-        Properties connectionProperties = new Properties();
-        connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC");
-        return connectionProperties;
-    }
-
 }
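The connectionProperties() override deleted here is not gone: the same override reappears in SpecBaseIntegrationTestCase further down in this diff (and is likewise removed from DebugCsvSpec and SqlSpecTestCase), so every spec suite gets the same UTC pinning from one place. The underlying JDBC mechanism is just a Properties bag handed over at connect time; a hedged sketch, where the "timezone" literal stands in for whatever key JdbcConfiguration.TIME_ZONE actually resolves to:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.util.Properties;

    public class UtcConnectionSketch {
        // Without this, the driver falls back to the JVM timezone, which makes
        // date assertions machine-dependent; pinning the connection to UTC keeps
        // expected and actual result sets comparable on any machine.
        static Connection connect(String url) throws SQLException {
            Properties props = new Properties();
            props.setProperty("timezone", "UTC"); // assumption: the key used by the ES JDBC driver
            return DriverManager.getConnection(url, props);
        }
    }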
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java
index fbbc2285ed123..ad26db3104758 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java
@@ -190,7 +190,7 @@ public Object parse(String line) {
     }

     public static class CsvTestCase {
-        String query;
-        String expectedResults;
+        public String query;
+        public String expectedResults;
     }
-}
+}
\ No newline at end of file
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java
index 655f02d97b8ad..05140577bcdf6 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java
@@ -32,18 +32,28 @@ public class DataLoader {

     public static void main(String[] args) throws Exception {
         try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
-            loadDatasetIntoEs(client);
+            loadEmpDatasetIntoEs(client);
             Loggers.getLogger(DataLoader.class).info("Data loaded");
         }
     }

     protected static void loadDatasetIntoEs(RestClient client) throws Exception {
-        loadDatasetIntoEs(client, "test_emp");
-        loadDatasetIntoEs(client, "test_emp_copy");
+        loadEmpDatasetIntoEs(client);
+    }
+
+    protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception {
+        loadEmpDatasetIntoEs(client, "test_emp");
+        loadEmpDatasetIntoEs(client, "test_emp_copy");
         makeAlias(client, "test_alias", "test_emp", "test_emp_copy");
         makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy");
     }

+    public static void loadDocsDatasetIntoEs(RestClient client) throws Exception {
+        loadEmpDatasetIntoEs(client, "emp");
+        loadLibDatasetIntoEs(client, "library");
+        makeAlias(client, "employees", "emp");
+    }
+
     private static void createString(String name, XContentBuilder builder) throws Exception {
         builder.startObject(name).field("type", "text")
             .startObject("fields")
@@ -51,7 +61,8 @@ private static void createString(String name, XContentBuilder builder) throws Exception {
             .endObject()
         .endObject();
     }
-    protected static void loadDatasetIntoEs(RestClient client, String index) throws Exception {
+
+    protected static void loadEmpDatasetIntoEs(RestClient client, String index) throws Exception {
         Request request = new Request("PUT", "/" + index);
         XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
         createIndex.startObject("settings");
@@ -151,6 +162,52 @@ protected static void loadDatasetIntoEs(RestClient client, String index) throws Exception {
         client.performRequest(request);
     }

+    protected static void loadLibDatasetIntoEs(RestClient client, String index) throws Exception {
+        Request request = new Request("PUT", "/" + index);
+        XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
+        createIndex.startObject("settings");
+        {
+            createIndex.field("number_of_shards", 1);
+            createIndex.field("number_of_replicas", 1);
+        }
+        createIndex.endObject();
+        createIndex.startObject("mappings");
+        {
+            createIndex.startObject("book");
+            {
+                createIndex.startObject("properties");
+                {
+                    createString("name", createIndex);
+                    createString("author", createIndex);
+                    createIndex.startObject("release_date").field("type", "date").endObject();
+                    createIndex.startObject("page_count").field("type", "short").endObject();
+                }
+                createIndex.endObject();
+            }
+            createIndex.endObject();
+        }
+        createIndex.endObject().endObject();
+        request.setJsonEntity(Strings.toString(createIndex));
+        client.performRequest(request);
+
+        request = new Request("POST", "/" + index + "/book/_bulk");
+        request.addParameter("refresh", "true");
+        StringBuilder bulk = new StringBuilder();
+        csvToLines("library", (titles, fields) -> {
+            bulk.append("{\"index\":{\"_id\":\"" + fields.get(0) + "\"}}\n");
+            bulk.append("{");
+            for (int f = 0; f < titles.size(); f++) {
+                if (f > 0) {
+                    bulk.append(",");
+                }
+                bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
+            }
+            bulk.append("}\n");
+        });
+        request.setJsonEntity(bulk.toString());
+        client.performRequest(request);
+    }
+
     protected static void makeAlias(RestClient client, String aliasName, String... indices) throws Exception {
         for (String index : indices) {
             client.performRequest(new Request("POST", "/" + index + "/_alias/" + aliasName));
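loadLibDatasetIntoEs above builds the _bulk request body by hand: NDJSON, one {"index":...} action line followed by one source line per CSV row, each terminated by a newline, with refresh=true so the documents are immediately searchable. A standalone sketch of the same construction; note it quotes every field as a JSON string, the same simplification the loader makes:

    import java.util.Arrays;
    import java.util.List;

    public class BulkBodySketch {
        // Build an NDJSON _bulk body: an action line then a source line per document.
        static String bulkBody(List<String> titles, List<List<String>> rows) {
            StringBuilder bulk = new StringBuilder();
            int id = 0;
            for (List<String> fields : rows) {
                bulk.append("{\"index\":{\"_id\":\"").append(id++).append("\"}}\n");
                bulk.append('{');
                for (int f = 0; f < titles.size(); f++) {
                    if (f > 0) {
                        bulk.append(',');
                    }
                    bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
                }
                bulk.append("}\n");
            }
            return bulk.toString();
        }

        public static void main(String[] args) {
            System.out.print(bulkBody(Arrays.asList("name", "author"),
                    Arrays.asList(Arrays.asList("Dune", "Frank Herbert"))));
        }
    }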
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DebugCsvSpec.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DebugCsvSpec.java
index c0d3db026d8bd..c4d25f4311327 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DebugCsvSpec.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DebugCsvSpec.java
@@ -10,13 +10,11 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.CsvTestCase;
-import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;

 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.List;
-import java.util.Properties;

 import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.csvConnection;
 import static org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.executeCsvQuery;
@@ -65,12 +63,4 @@ protected final void doTest() throws Throwable {
             assertResults(expected, elasticResults);
         }
     }
-
-    // make sure ES uses UTC (otherwise JDBC driver picks up the JVM timezone per spec/convention)
-    @Override
-    protected Properties connectionProperties() {
-        Properties connectionProperties = new Properties();
-        connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC");
-        return connectionProperties;
-    }
 }
\ No newline at end of file
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java
index 801f40639fad1..47f531ebd1f9b 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java
@@ -20,10 +20,20 @@
 import java.util.TimeZone;

 import static java.lang.String.format;
+import static java.sql.Types.BIGINT;
+import static java.sql.Types.DOUBLE;
+import static java.sql.Types.FLOAT;
+import static java.sql.Types.INTEGER;
+import static java.sql.Types.REAL;
+import static java.sql.Types.SMALLINT;
+import static java.sql.Types.TINYINT;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

+/**
+ * Utility class for doing JUnit-style asserts over JDBC.
+ */
 public class JdbcAssert {
     private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);

@@ -32,14 +42,29 @@ public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException {
     }

     public static void assertResultSets(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
+        assertResultSets(expected, actual, logger, false);
+    }
+
+    /**
+     * Assert the given result sets, potentially in a lenient way.
+     * When lenient is specified, the type comparison of a column is widened to reach a common, compatible ground.
+     * This means promoting integer types to long and floating point types to a common floating point type, then
+     * comparing their values. For example, in the non-lenient, strict case a comparison between an int and a
+     * tinyint would fail; with lenient it will succeed as long as the actual value is the same.
+     */
+    public static void assertResultSets(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException {
         try (ResultSet ex = expected; ResultSet ac = actual) {
-            assertResultSetMetadata(ex, ac, logger);
-            assertResultSetData(ex, ac, logger);
+            assertResultSetMetadata(ex, ac, logger, lenient);
+            assertResultSetData(ex, ac, logger, lenient);
         }
     }

-    // metadata doesn't consume a ResultSet thus it shouldn't close it
     public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
+        assertResultSetMetadata(expected, actual, logger, false);
+    }
+
+    // metadata doesn't consume a ResultSet thus it shouldn't close it
+    public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException {
         ResultSetMetaData expectedMeta = expected.getMetaData();
         ResultSetMetaData actualMeta = actual.getMetaData();

@@ -81,8 +106,8 @@ public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
             }

             // use the type not the name (timestamp with timezone returns spaces for example)
-            int expectedType = expectedMeta.getColumnType(column);
-            int actualType = actualMeta.getColumnType(column);
+            int expectedType = typeOf(expectedMeta.getColumnType(column), lenient);
+            int actualType = typeOf(actualMeta.getColumnType(column), lenient);

             // since H2 cannot use a fixed timezone, the data is stored in UTC (and thus with timezone)
             if (expectedType == Types.TIMESTAMP_WITH_TIMEZONE) {
@@ -92,6 +117,7 @@ public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, Logger logger) throws SQLException {
             if (expectedType == Types.FLOAT && expected instanceof CsvResultSet) {
                 expectedType = Types.REAL;
             }
+            // when lenient is used, an int is equivalent to a short, etc.
assertEquals("Different column type for column [" + expectedName + "] (" + JDBCType.valueOf(expectedType) + " != " + JDBCType.valueOf(actualType) + ")", expectedType, actualType); } @@ -99,12 +125,16 @@ public static void assertResultSetMetadata(ResultSet expected, ResultSet actual, // The ResultSet is consumed and thus it should be closed public static void assertResultSetData(ResultSet expected, ResultSet actual, Logger logger) throws SQLException { + assertResultSetData(expected, actual, logger, false); + } + + public static void assertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { try (ResultSet ex = expected; ResultSet ac = actual) { - doAssertResultSetData(ex, ac, logger); + doAssertResultSetData(ex, ac, logger, lenient); } } - private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Logger logger) throws SQLException { + private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { ResultSetMetaData metaData = expected.getMetaData(); int columns = metaData.getColumnCount(); @@ -118,10 +148,33 @@ private static void doAssertResultSetData(ResultSet expected, ResultSet actual, } for (int column = 1; column <= columns; column++) { - Object expectedObject = expected.getObject(column); - Object actualObject = actual.getObject(column); - int type = metaData.getColumnType(column); + Class expectedColumnClass = null; + try { + String columnClassName = metaData.getColumnClassName(column); + + // fix for CSV which returns the shortName not fully-qualified name + if (!columnClassName.contains(".")) { + switch (columnClassName) { + case "Timestamp": + columnClassName = "java.sql.Timestamp"; + break; + case "Int": + columnClassName = "java.lang.Integer"; + break; + default: + columnClassName = "java.lang." + columnClassName; + break; + } + } + + expectedColumnClass = Class.forName(columnClassName); + } catch (ClassNotFoundException cnfe) { + throw new SQLException(cnfe); + } + + Object expectedObject = expected.getObject(column); + Object actualObject = lenient ? actual.getObject(column, expectedColumnClass) : actual.getObject(column); String msg = format(Locale.ROOT, "Different result for column [" + metaData.getColumnName(column) + "], " + "entry [" + (count + 1) + "]"); @@ -161,4 +214,20 @@ else if (type == Types.DOUBLE) { } } + /** + * Returns the value of the given type either in a lenient fashion (widened) or strict. 
+     */
+    private static int typeOf(int columnType, boolean lenient) {
+        if (lenient) {
+            // integer upcast to long
+            if (columnType == TINYINT || columnType == SMALLINT || columnType == INTEGER || columnType == BIGINT) {
+                return BIGINT;
+            }
+            if (columnType == FLOAT || columnType == REAL || columnType == DOUBLE) {
+                return REAL;
+            }
+        }
+
+        return columnType;
+    }
 }
\ No newline at end of file
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java
index a2b524c20b070..a339222445a1a 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcIntegrationTestCase.java
@@ -82,7 +82,11 @@ protected Connection useDataSource() throws SQLException {
     }

     public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
-        Request request = new Request("PUT", "/" + index + "/doc/1");
+        index(index, "1", body);
+    }
+
+    public static void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
+        Request request = new Request("PUT", "/" + index + "/doc/" + documentId);
         request.addParameter("refresh", "true");
         XContentBuilder builder = JsonXContent.contentBuilder().startObject();
         body.accept(builder);
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcTestUtils.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcTestUtils.java
index 5062525f2b31e..2bb4697749a3a 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcTestUtils.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcTestUtils.java
@@ -6,10 +6,16 @@
 package org.elasticsearch.xpack.qa.sql.jdbc;

 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.xpack.sql.action.CliFormatter;
+import org.elasticsearch.xpack.sql.proto.ColumnInfo;

+import java.sql.JDBCType;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.List;

 public abstract class JdbcTestUtils {

@@ -96,4 +102,36 @@ private static StringBuilder trimOrPad(StringBuilder buffer) {
         }
         return buffer;
     }
+
+    public static void logLikeCLI(ResultSet rs, Logger logger) throws SQLException {
+        ResultSetMetaData metaData = rs.getMetaData();
+        int columns = metaData.getColumnCount();
+
+        List<ColumnInfo> cols = new ArrayList<>(columns);
+
+        for (int i = 1; i <= columns; i++) {
+            cols.add(new ColumnInfo(metaData.getTableName(i), metaData.getColumnName(i), metaData.getColumnTypeName(i),
+                    JDBCType.valueOf(metaData.getColumnType(i)), metaData.getColumnDisplaySize(i)));
+        }
+
+
+        List<List<Object>> data = new ArrayList<>();
+
+        while (rs.next()) {
+            List<Object> entry = new ArrayList<>(columns);
+            for (int i = 1; i <= columns; i++) {
+                Object value = rs.getObject(i);
+                // timestamp to string is similar but not ISO8601 - fix it
+                if (value instanceof Timestamp) {
+                    Timestamp ts = (Timestamp) value;
+                    value = ts.toInstant().toString();
+                }
+                entry.add(value);
+            }
+            data.add(entry);
+        }
+
+        CliFormatter formatter = new CliFormatter(cols, data);
+        logger.info("\n" + formatter.formatWithHeader(cols, data));
+    }
 }
\ No newline at end of file
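The lenient comparison path hinges on typeOf above: both sides of the metadata assert are first collapsed onto a common JDBC type, BIGINT for the integer family and REAL for the floating point family, so H2's INTEGER column can be compared against an Elasticsearch TINYINT or SMALLINT column with the same value. The same logic in isolation:

    import static java.sql.Types.*;

    public class LenientTypeSketch {
        // Same widening idea as JdbcAssert.typeOf: integers collapse to BIGINT,
        // floating point types to REAL, so INTEGER vs TINYINT compares equal.
        static int widen(int columnType) {
            if (columnType == TINYINT || columnType == SMALLINT || columnType == INTEGER || columnType == BIGINT) {
                return BIGINT;
            }
            if (columnType == FLOAT || columnType == REAL || columnType == DOUBLE) {
                return REAL;
            }
            return columnType;
        }

        public static void main(String[] args) {
            System.out.println(widen(TINYINT) == widen(INTEGER)); // true under leniency
            System.out.println(widen(FLOAT) == widen(DOUBLE));    // true under leniency
        }
    }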
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java
new file mode 100644
index 0000000000000..861a6dccaba57
--- /dev/null
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/ResultSetTestCase.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.qa.sql.jdbc;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.util.Date;
+
+public class ResultSetTestCase extends JdbcIntegrationTestCase {
+    public void testGettingTimestamp() throws Exception {
+        long randomMillis = randomLongBetween(0, System.currentTimeMillis());
+
+        index("library", "1", builder -> {
+            builder.field("name", "Don Quixote");
+            builder.field("page_count", 1072);
+            builder.timeField("release_date", new Date(randomMillis));
+            builder.timeField("republish_date", null);
+        });
+        index("library", "2", builder -> {
+            builder.field("name", "1984");
+            builder.field("page_count", 328);
+            builder.timeField("release_date", new Date(-649036800000L));
+            builder.timeField("republish_date", new Date(599616000000L));
+        });
+
+        try (Connection connection = esJdbc()) {
+            try (PreparedStatement statement = connection.prepareStatement("SELECT name, release_date, republish_date FROM library")) {
+                try (ResultSet results = statement.executeQuery()) {
+                    ResultSetMetaData resultSetMetaData = results.getMetaData();
+
+                    results.next();
+                    assertEquals(3, resultSetMetaData.getColumnCount());
+                    assertEquals(randomMillis, results.getTimestamp("release_date").getTime());
+                    assertEquals(randomMillis, results.getTimestamp(2).getTime());
+                    assertTrue(results.getObject(2) instanceof Timestamp);
+                    assertEquals(randomMillis, ((Timestamp) results.getObject("release_date")).getTime());
+
+                    assertNull(results.getTimestamp(3));
+                    assertNull(results.getObject("republish_date"));
+
+                    assertTrue(results.next());
+                    assertEquals(599616000000L, results.getTimestamp("republish_date").getTime());
+                    assertEquals(-649036800000L, ((Timestamp) results.getObject(2)).getTime());
+
+                    assertFalse(results.next());
+                }
+            }
+        }
+    }
+
+    /*
+     * Checks StackOverflowError fix for https://github.com/elastic/elasticsearch/pull/31735
+     */
+    public void testNoInfiniteRecursiveGetObjectCalls() throws SQLException, IOException {
+        index("library", "1", builder -> {
+            builder.field("name", "Don Quixote");
+            builder.field("page_count", 1072);
+        });
+        Connection conn = esJdbc();
+        PreparedStatement statement = conn.prepareStatement("SELECT * FROM library");
+        ResultSet results = statement.executeQuery();
+
+        try {
+            results.next();
+            results.getObject("name");
+            results.getObject("page_count");
+            results.getObject(1);
+            results.getObject(1, String.class);
+            results.getObject("page_count", Integer.class);
+        } catch (StackOverflowError soe) {
+            fail("Infinite recursive call on getObject() method");
+        }
+    }
+}
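testNoInfiniteRecursiveGetObjectCalls above pins the regression referenced in https://github.com/elastic/elasticsearch/pull/31735: if the untyped getObject(int) delegates to the typed overload while the typed overload delegates back, every call recurses until the stack blows. A stripped-down sketch of that cycle (a hypothetical class, not the driver's actual code):

    // Each overload "helpfully" delegates to the other, so any getObject call
    // recurses forever; at least one overload must do the real work.
    public class RecursiveDelegationSketch {
        Object getObject(int column) {
            return getObject(column, Object.class);      // delegates "up"...
        }

        <T> T getObject(int column, Class<T> type) {
            return type.cast(getObject(column));         // ...and back "down"
        }

        public static void main(String[] args) {
            try {
                new RecursiveDelegationSketch().getObject(1);
            } catch (StackOverflowError expected) {
                System.out.println("StackOverflowError: the delegation cycle must be broken");
            }
        }
    }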
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java
index d8ba1ade959ae..9ece8d7d1d33c 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java
@@ -8,8 +8,10 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.ResponseException;
+import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -28,6 +30,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Properties;

 /**
  * Tests that compare the Elasticsearch JDBC client to some other JDBC client
@@ -50,11 +53,19 @@ public SpecBaseIntegrationTestCase(String fileName, String groupName, String testName, Integer lineNumber) {

     @Before
     public void setupTestDataIfNeeded() throws Exception {
-        if (client().performRequest(new Request("HEAD", "/test_emp")).getStatusLine().getStatusCode() == 404) {
-            DataLoader.loadDatasetIntoEs(client());
+        if (client().performRequest(new Request("HEAD", "/" + indexName())).getStatusLine().getStatusCode() == 404) {
+            loadDataset(client());
         }
     }

+    protected String indexName() {
+        return "test_emp";
+    }
+
+    protected void loadDataset(RestClient client) throws Exception {
+        DataLoader.loadEmpDatasetIntoEs(client);
+    }
+
     @Override
     protected boolean preserveIndicesUponCompletion() {
         return true;
@@ -95,6 +106,14 @@ protected ResultSet executeJdbcQuery(Connection con, String query) throws SQLException {
         return statement.executeQuery(query);
     }

+    // TODO: use UTC for now until deciding on a strategy for handling date extraction
+    @Override
+    protected Properties connectionProperties() {
+        Properties connectionProperties = new Properties();
+        connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC");
+        return connectionProperties;
+    }
+
     protected boolean logEsResultSet() {
         return false;
     }
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java
index f1bcef6f750fc..3b5cae742d34b 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java
@@ -7,14 +7,12 @@

 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

-import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
 import org.junit.ClassRule;

 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Properties;

 /**
  * Tests comparing sql queries executed against our jdbc client
@@ -67,12 +65,4 @@ protected final void doTest() throws Throwable {
             assertResults(expected, elasticResults);
         }
     }
-
-    // TODO: use UTC for now until deciding on a strategy for handling date extraction
-    @Override
-    protected Properties connectionProperties() {
-        Properties connectionProperties = new Properties();
-        connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC");
-        return connectionProperties;
-    }
 }
diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec
new file mode 100644
index 0000000000000..8bf74bc4a2f89
--- /dev/null
+++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec
@@ -0,0 +1,639 @@
+//
+// CSV spec used by the docs
+//
+
+///////////////////////////////
+//
+// Describe table
+//
+///////////////////////////////
+
+describeTable
+// tag::describeTable
+DESCRIBE emp;
+
+       column       |     type
+--------------------+---------------
+birth_date          |TIMESTAMP
+dep                 |STRUCT
+dep.dep_id          |VARCHAR
+dep.dep_name        |VARCHAR
+dep.dep_name.keyword|VARCHAR
+dep.from_date       |TIMESTAMP
+dep.to_date         |TIMESTAMP
+emp_no              |INTEGER
+first_name          |VARCHAR
+first_name.keyword  |VARCHAR
+gender              |VARCHAR
+hire_date           |TIMESTAMP
+languages           |TINYINT
+last_name           |VARCHAR
+last_name.keyword   |VARCHAR
+salary              |INTEGER
+
+// end::describeTable
+;
+
+//describeTableAlias
+// tag::describeTableAlias
+//DESCRIBE employee;
+
+//     column      |     type
+//---------------+---------------
+
+// end::describeTableAlias
+//;
+
+//
+// Show columns
+//
+showColumns
+// tag::showColumns
+SHOW COLUMNS IN emp;
+
+       column       |     type
+--------------------+---------------
+birth_date          |TIMESTAMP
+dep                 |STRUCT
+dep.dep_id          |VARCHAR
+dep.dep_name        |VARCHAR
+dep.dep_name.keyword|VARCHAR
+dep.from_date       |TIMESTAMP
+dep.to_date         |TIMESTAMP
+emp_no              |INTEGER
+first_name          |VARCHAR
+first_name.keyword  |VARCHAR
+gender              |VARCHAR
+hire_date           |TIMESTAMP
+languages           |TINYINT
+last_name           |VARCHAR
+last_name.keyword   |VARCHAR
+salary              |INTEGER
+
+// end::showColumns
+;
+
+//showColumnsInAlias
+// tag::showColumnsInAlias
+//SHOW COLUMNS FROM employee;
+
+//     column      |     type
+//---------------+---------------
+
+// end::showColumnsInAlias
+//;
+
+///////////////////////////////
+//
+// Show Tables
+//
+///////////////////////////////
+
+showTables
+// tag::showTables
+SHOW TABLES;
+
+     name      |     type
+---------------+---------------
+emp            |BASE TABLE
+employees      |ALIAS
+library        |BASE TABLE
+
+// end::showTables
+;
+
+showTablesLikeExact
+// tag::showTablesLikeExact
+SHOW TABLES LIKE 'emp';
+
+     name      |     type
+---------------+---------------
+emp            |BASE TABLE
+
+// end::showTablesLikeExact
+;
+
+showTablesLikeWildcard
+// tag::showTablesLikeWildcard
+SHOW TABLES LIKE 'emp%';
+
+     name      |     type
+---------------+---------------
+emp            |BASE TABLE
+employees      |ALIAS
+
+// end::showTablesLikeWildcard
+;
+
+
+showTablesLikeOneChar
+// tag::showTablesLikeOneChar
+SHOW TABLES LIKE 'em_';
+
+     name      |     type
+---------------+---------------
+emp            |BASE TABLE
+
+// end::showTablesLikeOneChar
+;
+
+showTablesLikeMixed
+// tag::showTablesLikeMixed
+SHOW TABLES LIKE '%em_';
+
+     name      |     type
+---------------+---------------
+emp            |BASE TABLE
+
+// end::showTablesLikeMixed
+;
+
+///////////////////////////////
+//
+// Show Functions
+//
+///////////////////////////////
+
+showFunctions
+// tag::showFunctions
+SHOW FUNCTIONS;
+
+      name      |     type
+----------------+---------------
+AVG             |AGGREGATE
+COUNT           |AGGREGATE
+MAX             |AGGREGATE
+MIN             |AGGREGATE
+SUM             |AGGREGATE
+STDDEV_POP      |AGGREGATE
+VAR_POP         |AGGREGATE
+PERCENTILE      |AGGREGATE
+PERCENTILE_RANK |AGGREGATE
+SUM_OF_SQUARES  |AGGREGATE
+SKEWNESS        |AGGREGATE
+KURTOSIS        |AGGREGATE
+DAY_OF_MONTH    |SCALAR
+DAY             |SCALAR
+DOM             |SCALAR
+DAY_OF_WEEK     |SCALAR
+DOW             |SCALAR
+DAY_OF_YEAR     |SCALAR
+DOY             |SCALAR
+HOUR_OF_DAY     |SCALAR
+HOUR            |SCALAR
+MINUTE_OF_DAY   |SCALAR
+MINUTE_OF_HOUR  |SCALAR
+MINUTE          |SCALAR
+SECOND_OF_MINUTE|SCALAR
+SECOND          |SCALAR
+MONTH_OF_YEAR   |SCALAR
+MONTH           |SCALAR
+YEAR            |SCALAR
+WEEK_OF_YEAR    |SCALAR
+WEEK            |SCALAR
+ABS             |SCALAR
+ACOS            |SCALAR
+ASIN            |SCALAR
+ATAN            |SCALAR
+ATAN2           |SCALAR
+CBRT            |SCALAR
+CEIL            |SCALAR
+CEILING         |SCALAR
+COS             |SCALAR
+COSH            |SCALAR
+COT             |SCALAR
+DEGREES         |SCALAR
+E               |SCALAR
+EXP             |SCALAR
+EXPM1           |SCALAR
+FLOOR           |SCALAR
+LOG             |SCALAR
+LOG10           |SCALAR
+MOD             |SCALAR
+PI              |SCALAR
+POWER           |SCALAR
+RADIANS         |SCALAR
+RANDOM          |SCALAR
+RAND            |SCALAR
+ROUND           |SCALAR
+SIGN            |SCALAR
+SIGNUM          |SCALAR
+SIN             |SCALAR
+SINH            |SCALAR
+SQRT            |SCALAR
+TAN             |SCALAR
+SCORE           |SCORE
+
+// end::showFunctions
+;
+
+showFunctionsLikeExact
+// tag::showFunctionsLikeExact
+SHOW FUNCTIONS LIKE 'ABS';
+
+     name      |     type
+---------------+---------------
+ABS            |SCALAR
+
+// end::showFunctionsLikeExact
+;
+
+showFunctionsLikeWildcard
+// tag::showFunctionsLikeWildcard
+SHOW FUNCTIONS LIKE 'A%';
+
+     name      |     type
+---------------+---------------
+AVG            |AGGREGATE
+ABS            |SCALAR
+ACOS           |SCALAR
+ASIN           |SCALAR
+ATAN           |SCALAR
+ATAN2          |SCALAR
+// end::showFunctionsLikeWildcard
+;
+
+showFunctionsLikeChar
+// tag::showFunctionsLikeChar
+SHOW FUNCTIONS LIKE 'A__';
+
+     name      |     type
+---------------+---------------
+AVG            |AGGREGATE
+ABS            |SCALAR
+// end::showFunctionsLikeChar
+;
+
+showFunctionsWithPattern
+// tag::showFunctionsWithPattern
+SHOW FUNCTIONS '%DAY%';
+
+     name      |     type
+---------------+---------------
+DAY_OF_MONTH   |SCALAR
+DAY            |SCALAR
+DAY_OF_WEEK    |SCALAR
+DAY_OF_YEAR    |SCALAR
+HOUR_OF_DAY    |SCALAR
+MINUTE_OF_DAY  |SCALAR
+
+// end::showFunctionsWithPattern
+;
+
+///////////////////////////////
+//
+// Select
+//
+///////////////////////////////
+
+selectColumnAlias
+// tag::selectColumnAlias
+SELECT 1 + 1 AS result;
+
+    result
+---------------
+2
+
+// end::selectColumnAlias
+;
+
+selectInline
+// tag::selectInline
+SELECT 1 + 1;
+
+    (1 + 1)
+---------------
+2
+
+// end::selectInline
+;
+
+selectColumn
+// tag::selectColumn
+SELECT emp_no FROM emp LIMIT 1;
+
+    emp_no
+---------------
+10001
+
+// end::selectColumn
+;
+
+selectQualifiedColumn
+// tag::selectQualifiedColumn
+SELECT emp.emp_no FROM emp LIMIT 1;
+
+    emp_no
+---------------
+10001
+
+// end::selectQualifiedColumn
+;
+
+
+wildcardWithOrder
+// tag::wildcardWithOrder
+SELECT * FROM emp LIMIT 1;
+
+     birth_date     |    emp_no     |  first_name   |    gender     |     hire_date      |   languages   |   last_name   |    salary
+--------------------+---------------+---------------+---------------+--------------------+---------------+---------------+---------------
+1953-09-02T00:00:00Z|10001          |Georgi         |M              |1986-06-26T00:00:00Z|2              |Facello        |57305
+
+// end::wildcardWithOrder
+;
+
+fromTable
+// tag::fromTable
+SELECT * FROM emp LIMIT 1;
+
+     birth_date     |    emp_no     |  first_name   |    gender     |     hire_date      |   languages   |   last_name   |    salary
+--------------------+---------------+---------------+---------------+--------------------+---------------+---------------+---------------
+1953-09-02T00:00:00Z|10001          |Georgi         |M              |1986-06-26T00:00:00Z|2              |Facello        |57305
+
+
+// end::fromTable
+;
+
+fromTableQuoted
+// tag::fromTableQuoted
+SELECT * FROM "emp" LIMIT 1;
+
+     birth_date     |    emp_no     |  first_name   |    gender     |     hire_date      |   languages   |   last_name   |    salary
+--------------------+---------------+---------------+---------------+--------------------+---------------+---------------+---------------
+1953-09-02T00:00:00Z|10001          |Georgi         |M              |1986-06-26T00:00:00Z|2              |Facello        |57305
+
+// end::fromTableQuoted
+;
+
+fromTablePatternQuoted
+// tag::fromTablePatternQuoted
+SELECT emp_no FROM "e*p" LIMIT 1;
+
+    emp_no
+---------------
+10001
+
+// end::fromTablePatternQuoted
+;
+
+fromTableAlias
+// tag::fromTableAlias
+SELECT e.emp_no FROM emp AS e LIMIT 1;
+
+    emp_no
+---------------
+10001
+
+// end::fromTableAlias
+;
+
+basicWhere
+// tag::basicWhere
+SELECT last_name FROM emp WHERE emp_no = 10001;
+
+   last_name
+---------------
+Facello
+
+// end::basicWhere
+;
+
+///////////////////////////////
+//
+// Group By
+//
+///////////////////////////////
+
+groupByColumn
+// tag::groupByColumn
+SELECT gender AS g FROM emp GROUP BY gender;
+
+       g
+---------------
+F
+M
+
+// end::groupByColumn
+;
+
+groupByOrdinal
+// tag::groupByOrdinal
+SELECT gender FROM emp GROUP BY 1;
+
+    gender
+---------------
+F
+M
+
+// end::groupByOrdinal
+;
+
+groupByAlias
+// tag::groupByAlias
+SELECT gender AS g FROM emp GROUP BY g;
+
+       g
+---------------
+F
+M
+
+// end::groupByAlias
+;
+
+groupByExpression
+// tag::groupByExpression
+SELECT languages + 1 AS l FROM emp GROUP BY l;
+
+       l
+---------------
+2
+3
+4
+5
+6
+
+
+// end::groupByExpression
+;
+
+groupByAndAgg
+// tag::groupByAndAgg
+SELECT gender AS g, COUNT(*) AS c FROM emp GROUP BY gender;
+
+       g       |       c
+---------------+---------------
+F              |37
+M              |63
+
+// end::groupByAndAgg
+;
+
+groupByAndAggExpression
+// tag::groupByAndAggExpression
+SELECT gender AS g, ROUND(MIN(salary) / 100) AS salary FROM emp GROUP BY gender;
+
+       g       |    salary
+---------------+---------------
+F              |260
+M              |253
+
+// end::groupByAndAggExpression
+;
+
+groupByAndMultipleAggs
+// tag::groupByAndMultipleAggs
+SELECT gender AS g, KURTOSIS(salary) AS k, SKEWNESS(salary) AS s FROM emp GROUP BY gender;
+
+       g       |        k         |         s
+---------------+------------------+-------------------
+F              |1.8427808415250482|0.04517149340491813
+M              |2.259327644285826 |0.40268950715550333
+
+// end::groupByAndMultipleAggs
+;
+
+groupByImplicitCount
+// tag::groupByImplicitCount
+SELECT COUNT(*) AS count FROM emp;
+
+     count
+---------------
+100
+
+// end::groupByImplicitCount
+;
+
+///////////////////////////////
+//
+// Having
+//
+///////////////////////////////
+
+groupByHaving
+// tag::groupByHaving
+SELECT languages AS l, COUNT(*) AS c FROM emp GROUP BY l HAVING c BETWEEN 15 AND 20;
+
+       l       |       c
+---------------+---------------
+1              |16
+2              |20
+4              |18
+
+// end::groupByHaving
+;
+
+groupByHavingMultiple
+// tag::groupByHavingMultiple
+SELECT MIN(salary) AS min, MAX(salary) AS max, MAX(salary) - MIN(salary) AS diff FROM emp GROUP BY languages HAVING diff - max % min > 0 AND AVG(salary) > 30000;
+
+      min      |      max      |     diff
+---------------+---------------+---------------
+25976          |73717          |47741
+29175          |73578          |44403
+26436          |74999          |48563
+27215          |74572          |47357
+25324          |73851          |48527
+
+// end::groupByHavingMultiple
+;
+
+groupByImplicitMultipleAggs
+// tag::groupByImplicitMultipleAggs
+SELECT MIN(salary) AS min, MAX(salary) AS max, AVG(salary) AS avg, COUNT(*) AS count FROM emp;
+
+      min      |      max      |      avg      |     count
+---------------+---------------+---------------+---------------
+25324          |74999          |48248          |100
+
+// end::groupByImplicitMultipleAggs
+;
+
+groupByHavingImplicitMatch
+// tag::groupByHavingImplicitMatch
+SELECT MIN(salary) AS min, MAX(salary) AS max FROM emp HAVING min > 25000;
+
+      min      |      max
+---------------+---------------
+25324          |74999
+
+// end::groupByHavingImplicitMatch
+;
+
+//groupByHavingImplicitNoMatch
+// tag::groupByHavingImplicitNoMatch
+//SELECT MIN(salary) AS min, MAX(salary) AS max FROM emp HAVING max > 75000;
+
+//      min      |      max
+//---------------+---------------
+
+// end::groupByHavingImplicitNoMatch
+//;
+
+///////////////////////////////
+//
+// Order by
+//
+///////////////////////////////
+
+orderByBasic
+// tag::orderByBasic
+SELECT * FROM library ORDER BY page_count DESC LIMIT 5;
+
+     author      |        name        |  page_count   |    release_date
+-----------------+--------------------+---------------+--------------------
+Peter F. Hamilton|Pandora's Star      |768            |2004-03-02T00:00:00Z
+Vernor Vinge     |A Fire Upon the Deep|613            |1992-06-01T00:00:00Z
+Frank Herbert    |Dune                |604            |1965-06-01T00:00:00Z
+Alastair Reynolds|Revelation Space    |585            |2000-03-15T00:00:00Z
+James S.A. Corey |Leviathan Wakes     |561            |2011-06-02T00:00:00Z
+
+
+
+// end::orderByBasic
+;
+
+orderByScore
+// tag::orderByScore
+SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC;
+
+    SCORE()    |    author     |       name        |  page_count   |    release_date
+---------------+---------------+-------------------+---------------+--------------------
+2.288635       |Frank Herbert  |Dune               |604            |1965-06-01T00:00:00Z
+1.8893257      |Frank Herbert  |Dune Messiah       |331            |1969-10-15T00:00:00Z
+1.6086555      |Frank Herbert  |Children of Dune   |408            |1976-04-21T00:00:00Z
+1.4005898      |Frank Herbert  |God Emperor of Dune|454            |1981-05-28T00:00:00Z
+
+// end::orderByScore
+;
+
+orderByScoreWithMatch
+// tag::orderByScoreWithMatch
+SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC;
+
+    SCORE()    |    author     |       name        |  page_count   |    release_date
+---------------+---------------+-------------------+---------------+--------------------
+2.288635       |Frank Herbert  |Dune               |604            |1965-06-01T00:00:00Z
+1.4005898      |Frank Herbert  |God Emperor of Dune|454            |1981-05-28T00:00:00Z
+1.6086555      |Frank Herbert  |Children of Dune   |408            |1976-04-21T00:00:00Z
+1.8893257      |Frank Herbert  |Dune Messiah       |331            |1969-10-15T00:00:00Z
+
+// end::orderByScoreWithMatch
+;
+
+
+///////////////////////////////
+//
+// Limit
+//
+///////////////////////////////
+
+limitBasic
+// tag::limitBasic
+SELECT first_name, last_name, emp_no FROM emp LIMIT 1;
+
+  first_name   |   last_name   |    emp_no
+---------------+---------------+---------------
+Georgi         |Facello        |10001
+
+// end::limitBasic
+;
diff --git a/x-pack/qa/sql/src/main/resources/library.csv b/x-pack/qa/sql/src/main/resources/library.csv
new file mode 100644
index 0000000000000..a93be21abe63e
--- /dev/null
+++ b/x-pack/qa/sql/src/main/resources/library.csv
@@ -0,0 +1,25 @@
+name,author,release_date,page_count
+Leviathan Wakes,James S.A. Corey,2011-06-02T00:00:00Z,561
+Hyperion,Dan Simmons,1989-05-26T00:00:00Z,482
+Dune,Frank Herbert,1965-06-01T00:00:00Z,604
+Dune Messiah,Frank Herbert,1969-10-15T00:00:00Z,331
+Children of Dune,Frank Herbert,1976-04-21T00:00:00Z,408
+God Emperor of Dune,Frank Herbert,1981-05-28T00:00:00Z,454
+Consider Phlebas,Iain M. Banks,1987-04-23T00:00:00Z,471
+Pandora's Star,Peter F. Hamilton,2004-03-02T00:00:00Z,768
+Revelation Space,Alastair Reynolds,2000-03-15T00:00:00Z,585
+A Fire Upon the Deep,Vernor Vinge,1992-06-01T00:00:00Z,613
+Ender's Game,Orson Scott Card,1985-06-01T00:00:00Z,324
+1984,George Orwell,1985-06-01T00:00:00Z,328
+Fahrenheit 451,Ray Bradbury,1953-10-15T00:00:00Z,227
+Brave New World,Aldous Huxley,1932-06-01T00:00:00Z,268
+Foundation,Isaac Asimov,1951-06-01T00:00:00Z,224
+The Giver,Lois Lowry,1993-04-26T00:00:00Z,208
+Slaughterhouse-Five,Kurt Vonnegut,1969-06-01T00:00:00Z,275
+The Hitchhiker's Guide to the Galaxy,Douglas Adams,1979-10-12T00:00:00Z,180
+Snow Crash,Neal Stephenson,1992-06-01T00:00:00Z,470
+Neuromancer,William Gibson,1984-07-01T00:00:00Z,271
+The Handmaid's Tale,Margaret Atwood,1985-06-01T00:00:00Z,311
+Starship Troopers,Robert A. Heinlein,1959-12-01T00:00:00Z,335
+The Left Hand of Darkness,Ursula K. Le Guin,1969-06-01T00:00:00Z,304
+The Moon is a Harsh Mistress,Robert A. Heinlein,1966-04-01T00:00:00Z,288
diff --git a/x-pack/qa/sql/src/main/resources/select.sql-spec b/x-pack/qa/sql/src/main/resources/select.sql-spec
index 76562a07c86f7..ce57606e35b0c 100644
--- a/x-pack/qa/sql/src/main/resources/select.sql-spec
+++ b/x-pack/qa/sql/src/main/resources/select.sql-spec
@@ -3,9 +3,7 @@
 //

 wildcardWithOrder
-// tag::wildcardWithOrder
 SELECT * FROM test_emp ORDER BY emp_no;
-// end::wildcardWithOrder

 column
 SELECT last_name FROM "test_emp" ORDER BY emp_no;

 columnWithAlias