From c4ed1cd99992462f5676f2a89fa29877e3e22104 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 16 May 2018 18:17:12 +0200 Subject: [PATCH] Rest High Level client: Add List Tasks (#29546) This change adds a `listTasks` method to the high level java ClusterClient which allows listing running tasks through the task management API. Related to #27205 --- .../elasticsearch/client/ClusterClient.java | 24 +++ .../client/RequestConverters.java | 60 ++++++ .../elasticsearch/client/ClusterClientIT.java | 31 ++++ .../client/RequestConvertersTests.java | 175 ++++++++++++------ .../ClusterClientDocumentationIT.java | 95 ++++++++++ .../high-level/cluster/list_tasks.asciidoc | 101 ++++++++++ .../high-level/supported-apis.asciidoc | 5 +- .../action/TaskOperationFailure.java | 38 +++- .../node/tasks/list/ListTasksResponse.java | 51 ++++- .../support/tasks/BaseTasksResponse.java | 8 +- .../admin/cluster/RestListTasksAction.java | 7 +- .../org/elasticsearch/tasks/TaskInfo.java | 5 + .../action/TaskOperationFailureTests.java | 63 +++++++ .../admin/cluster/node/tasks/TasksIT.java | 3 +- .../tasks/ListTasksResponseTests.java | 65 ++++++- .../elasticsearch/tasks/TaskInfoTests.java | 156 ++++++++++++++++ .../elasticsearch/tasks/TaskResultTests.java | 35 +--- .../org/elasticsearch/test/ESTestCase.java | 8 +- .../core/ml/action/GetJobsStatsAction.java | 4 +- 19 files changed, 806 insertions(+), 128 deletions(-) create mode 100644 docs/java-rest/high-level/cluster/list_tasks.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/TaskOperationFailureTests.java create mode 100644 server/src/test/java/org/elasticsearch/tasks/TaskInfoTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java index f3c84db79d65f..e78e4686d6991 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java @@ -21,6 +21,8 @@ import org.apache.http.Header; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; @@ -63,4 +65,26 @@ public void putSettingsAsync(ClusterUpdateSettingsRequest clusterUpdateSettingsR restHighLevelClient.performRequestAsyncAndParseEntity(clusterUpdateSettingsRequest, RequestConverters::clusterPutSettings, ClusterUpdateSettingsResponse::fromXContent, listener, emptySet(), headers); } + + /** + * Get current tasks using the Task Management API + *
+ * See + * Task Management API on elastic.co + */ + public ListTasksResponse listTasks(ListTasksRequest request, Header... headers) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent, + emptySet(), headers); + } + + /** + * Asynchronously get current tasks using the Task Management API + *
+ * See + * Task Management API on elastic.co + */ + public void listTasksAsync(ListTasksRequest request, ActionListener listener, Header... headers) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent, + listener, emptySet(), headers); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 103e7cd6784d4..facffa4144e35 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -29,6 +29,7 @@ import org.apache.http.entity.ContentType; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -79,6 +80,7 @@ import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; +import org.elasticsearch.tasks.TaskId; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -580,6 +582,22 @@ static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSett return request; } + static Request listTasks(ListTasksRequest listTaskRequest) { + if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) { + throw new IllegalArgumentException("TaskId cannot be used for list tasks request"); + } + Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); + Params params = new Params(request); + params.withTimeout(listTaskRequest.getTimeout()) + .withDetailed(listTaskRequest.getDetailed()) + .withWaitForCompletion(listTaskRequest.getWaitForCompletion()) + .withParentTaskId(listTaskRequest.getParentTaskId()) + .withNodes(listTaskRequest.getNodes()) + .withActions(listTaskRequest.getActions()) + .putParam("group_by", "none"); + return request; + } + static Request rollover(RolloverRequest rolloverRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover") .addPathPart(rolloverRequest.getNewIndexName()).build(); @@ -880,6 +898,48 @@ Params withPreserveExisting(boolean preserveExisting) { } return this; } + + Params withDetailed(boolean detailed) { + if (detailed) { + return putParam("detailed", Boolean.TRUE.toString()); + } + return this; + } + + Params withWaitForCompletion(boolean waitForCompletion) { + if (waitForCompletion) { + return putParam("wait_for_completion", Boolean.TRUE.toString()); + } + return this; + } + + Params withNodes(String[] nodes) { + if (nodes != null && nodes.length > 0) { + return putParam("nodes", String.join(",", nodes)); + } + return this; + } + + Params withActions(String[] actions) { + if (actions != null && actions.length > 0) { + return putParam("actions", String.join(",", actions)); + } + return this; + } + + Params withParentTaskId(TaskId parentTaskId) { + if (parentTaskId != null && parentTaskId.isSet()) { + return putParam("parent_task_id", parentTaskId.toString()); + } + return this; + } + + Params withVerify(boolean verify) { 
+ if (verify) { + return putParam("verify", Boolean.TRUE.toString()); + } + return this; + } } /** diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java index 9314bb2e36cea..fa3086442f528 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java @@ -20,6 +20,9 @@ package org.elasticsearch.client; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; @@ -29,13 +32,16 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.TaskInfo; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static java.util.Collections.emptyList; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -105,4 +111,29 @@ public void testClusterUpdateSettingNonExistent() { assertThat(exception.getMessage(), equalTo( "Elasticsearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]")); } + + public void testListTasks() throws IOException { + ListTasksRequest request = new ListTasksRequest(); + ListTasksResponse response = execute(request, highLevelClient().cluster()::listTasks, highLevelClient().cluster()::listTasksAsync); + + assertThat(response, notNullValue()); + assertThat(response.getNodeFailures(), equalTo(emptyList())); + assertThat(response.getTaskFailures(), equalTo(emptyList())); + // It's possible that there are other tasks except 'cluster:monitor/tasks/lists[n]' and 'action":"cluster:monitor/tasks/lists' + assertThat(response.getTasks().size(), greaterThanOrEqualTo(2)); + boolean listTasksFound = false; + for (TaskGroup taskGroup : response.getTaskGroups()) { + TaskInfo parent = taskGroup.getTaskInfo(); + if ("cluster:monitor/tasks/lists".equals(parent.getAction())) { + assertThat(taskGroup.getChildTasks().size(), equalTo(1)); + TaskGroup childGroup = taskGroup.getChildTasks().iterator().next(); + assertThat(childGroup.getChildTasks().isEmpty(), equalTo(true)); + TaskInfo child = childGroup.getTaskInfo(); + assertThat(child.getAction(), equalTo("cluster:monitor/tasks/lists[n]")); + assertThat(child.getParentTaskId(), equalTo(parent.getTaskId())); + listTasksFound = true; + } + } + assertTrue("List tasks were not found", listTasksFound); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 5e6e6da7ece96..59b3be2796fdd 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -29,6 +29,7 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -103,6 +104,7 @@ import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; @@ -130,6 +132,7 @@ import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class RequestConvertersTests extends ESTestCase { @@ -176,8 +179,7 @@ public void testMultiGet() throws IOException { int numberOfRequests = randomIntBetween(0, 32); for (int i = 0; i < numberOfRequests; i++) { - MultiGetRequest.Item item = - new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4)); + MultiGetRequest.Item item = new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4)); if (randomBoolean()) { item.routing(randomAlphaOfLength(4)); } @@ -264,7 +266,7 @@ public void testIndicesExist() { public void testIndicesExistEmptyIndices() { expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest())); - expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest().indices((String[])null))); + expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest().indices((String[]) null))); } private static void getAndExistsTest(Function requestConverter, String method) { @@ -938,22 +940,21 @@ public void testBulkWithDifferentContentTypes() throws IOException { bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.SMILE)); bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest)); - assertEquals("Mismatching content-type found for request with content-type [JSON], " + - "previous requests have content-type [SMILE]", exception.getMessage()); + assertEquals( + "Mismatching content-type found for request with content-type [JSON], " + "previous requests have content-type [SMILE]", + exception.getMessage()); } { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(new IndexRequest("index", "type", "0") - .source(singletonMap("field", "value"), XContentType.JSON)); - bulkRequest.add(new IndexRequest("index", "type", "1") - .source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new 
IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.JSON)); + bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON)); bulkRequest.add(new UpdateRequest("index", "type", "2") .doc(new IndexRequest().source(singletonMap("field", "value"), XContentType.JSON)) - .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE)) - ); + .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE))); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest)); - assertEquals("Mismatching content-type found for request with content-type [SMILE], " + - "previous requests have content-type [JSON]", exception.getMessage()); + assertEquals( + "Mismatching content-type found for request with content-type [SMILE], " + "previous requests have content-type [JSON]", + exception.getMessage()); } { XContentType xContentType = randomFrom(XContentType.CBOR, XContentType.YAML); @@ -1023,9 +1024,10 @@ public void testSearch() throws Exception { setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - //rarely skip setting the search source completely + // rarely skip setting the search source completely if (frequently()) { - //frequently set the search source to have some content, otherwise leave it empty but still set it + // frequently set the search source to have some content, otherwise leave it + // empty but still set it if (frequently()) { if (randomBoolean()) { searchSourceBuilder.size(randomIntBetween(0, Integer.MAX_VALUE)); @@ -1095,7 +1097,8 @@ public void testMultiSearch() throws IOException { MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); for (int i = 0; i < numberOfSearchRequests; i++) { SearchRequest searchRequest = randomSearchRequest(() -> { - // No need to return a very complex SearchSourceBuilder here, that is tested elsewhere + // No need to return a very complex SearchSourceBuilder here, that is tested + // elsewhere SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.from(randomInt(10)); searchSourceBuilder.size(randomIntBetween(20, 100)); @@ -1103,14 +1106,13 @@ public void testMultiSearch() throws IOException { }); // scroll is not supported in the current msearch api, so unset it: searchRequest.scroll((Scroll) null); - // only expand_wildcards, ignore_unavailable and allow_no_indices can be specified from msearch api, so unset other options: + // only expand_wildcards, ignore_unavailable and allow_no_indices can be + // specified from msearch api, so unset other options: IndicesOptions randomlyGenerated = searchRequest.indicesOptions(); IndicesOptions msearchDefault = new MultiSearchRequest().indicesOptions(); - searchRequest.indicesOptions(IndicesOptions.fromOptions( - randomlyGenerated.ignoreUnavailable(), randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), - randomlyGenerated.expandWildcardsClosed(), msearchDefault.allowAliasesToMultipleIndices(), - msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases() - )); + searchRequest.indicesOptions(IndicesOptions.fromOptions(randomlyGenerated.ignoreUnavailable(), + randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), randomlyGenerated.expandWildcardsClosed(), + msearchDefault.allowAliasesToMultipleIndices(), 
msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases())); multiSearchRequest.add(searchRequest); } @@ -1135,8 +1137,8 @@ public void testMultiSearch() throws IOException { requests.add(searchRequest); }; MultiSearchRequest.readMultiLineFormat(new BytesArray(EntityUtils.toByteArray(request.getEntity())), - REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null, - null, xContentRegistry(), true); + REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null, null, + xContentRegistry(), true); assertEquals(requests, multiSearchRequest.requests()); } @@ -1172,7 +1174,7 @@ public void testExistsAlias() { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); getAliasesRequest.indices(indices); - //the HEAD endpoint requires at least an alias or an index + // the HEAD endpoint requires at least an alias or an index boolean hasIndices = indices != null && indices.length > 0; String[] aliases; if (hasIndices) { @@ -1203,15 +1205,15 @@ public void testExistsAlias() { public void testExistsAliasNoAliasNoIndex() { { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> - RequestConverters.existsAlias(getAliasesRequest)); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> RequestConverters.existsAlias(getAliasesRequest)); assertEquals("existsAlias requires at least an alias or an index", iae.getMessage()); } { - GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[])null); - getAliasesRequest.indices((String[])null); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> - RequestConverters.existsAlias(getAliasesRequest)); + GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null); + getAliasesRequest.indices((String[]) null); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> RequestConverters.existsAlias(getAliasesRequest)); assertEquals("existsAlias requires at least an alias or an index", iae.getMessage()); } } @@ -1368,6 +1370,66 @@ public void testIndexPutSettings() throws IOException { assertEquals(expectedParams, request.getParameters()); } + public void testListTasks() { + { + ListTasksRequest request = new ListTasksRequest(); + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + request.setDetailed(randomBoolean()); + if (request.getDetailed()) { + expectedParams.put("detailed", "true"); + } + } + if (randomBoolean()) { + request.setWaitForCompletion(randomBoolean()); + if (request.getWaitForCompletion()) { + expectedParams.put("wait_for_completion", "true"); + } + } + if (randomBoolean()) { + String timeout = randomTimeValue(); + request.setTimeout(timeout); + expectedParams.put("timeout", timeout); + } + if (randomBoolean()) { + if (randomBoolean()) { + TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); + request.setParentTaskId(taskId); + expectedParams.put("parent_task_id", taskId.toString()); + } else { + request.setParentTask(TaskId.EMPTY_TASK_ID); + } + } + if (randomBoolean()) { + String[] nodes = generateRandomStringArray(10, 8, false); + request.setNodes(nodes); + if (nodes.length > 0) { + expectedParams.put("nodes", String.join(",", nodes)); + } + } + if (randomBoolean()) { + String[] actions = generateRandomStringArray(10, 8, false); + 
request.setActions(actions); + if (actions.length > 0) { + expectedParams.put("actions", String.join(",", actions)); + } + } + expectedParams.put("group_by", "none"); + Request httpRequest = RequestConverters.listTasks(request); + assertThat(httpRequest, notNullValue()); + assertThat(httpRequest.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(httpRequest.getEntity(), nullValue()); + assertThat(httpRequest.getEndpoint(), equalTo("/_tasks")); + assertThat(httpRequest.getParameters(), equalTo(expectedParams)); + } + { + ListTasksRequest request = new ListTasksRequest(); + request.setTaskId(new TaskId(randomAlphaOfLength(5), randomNonNegativeLong())); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.listTasks(request)); + assertEquals("TaskId cannot be used for list tasks request", exception.getMessage()); + } + } + public void testGetRepositories() { Map expectedParams = new HashMap<>(); StringBuilder endpoint = new StringBuilder("/_snapshot"); @@ -1377,7 +1439,7 @@ public void testGetRepositories() { setRandomLocal(getRepositoriesRequest, expectedParams); if (randomBoolean()) { - String[] entries = new String[] {"a", "b", "c"}; + String[] entries = new String[] { "a", "b", "c" }; getRepositoriesRequest.repositories(entries); endpoint.append("/" + String.join(",", entries)); } @@ -1395,9 +1457,8 @@ public void testPutTemplateRequest() throws Exception { names.put("-#template", "-%23template"); names.put("foo^bar", "foo%5Ebar"); - PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest() - .name(randomFrom(names.keySet())) - .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false))); + PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(randomFrom(names.keySet())) + .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false))); if (randomBoolean()) { putTemplateRequest.order(randomInt()); } @@ -1454,14 +1515,12 @@ public void testEndpointBuilder() { assertEquals("/a/b", endpointBuilder.build()); } { - EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a").addPathPart("b") - .addPathPartAsIs("_create"); + EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a").addPathPart("b").addPathPartAsIs("_create"); assertEquals("/a/b/_create", endpointBuilder.build()); } { - EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a", "b", "c") - .addPathPartAsIs("_create"); + EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a", "b", "c").addPathPartAsIs("_create"); assertEquals("/a/b/c/_create", endpointBuilder.build()); } { @@ -1520,13 +1579,12 @@ public void testEndpointBuilderEncodeParts() { assertEquals("/foo%5Ebar", endpointBuilder.build()); } { - EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("cluster1:index1,index2") - .addPathPartAsIs("_search"); + EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("cluster1:index1,index2").addPathPartAsIs("_search"); assertEquals("/cluster1:index1,index2/_search", endpointBuilder.build()); } { - EndpointBuilder endpointBuilder = new EndpointBuilder() - .addCommaSeparatedPathParts(new String[]{"index1", "index2"}).addPathPartAsIs("cache/clear"); + EndpointBuilder endpointBuilder = new EndpointBuilder().addCommaSeparatedPathParts(new String[] { "index1", "index2" }) + .addPathPartAsIs("cache/clear"); assertEquals("/index1,index2/cache/clear", endpointBuilder.build()); } } @@ -1534,12 +1592,12 @@ public void 
testEndpointBuilderEncodeParts() { public void testEndpoint() { assertEquals("/index/type/id", RequestConverters.endpoint("index", "type", "id")); assertEquals("/index/type/id/_endpoint", RequestConverters.endpoint("index", "type", "id", "_endpoint")); - assertEquals("/index1,index2", RequestConverters.endpoint(new String[]{"index1", "index2"})); - assertEquals("/index1,index2/_endpoint", RequestConverters.endpoint(new String[]{"index1", "index2"}, "_endpoint")); - assertEquals("/index1,index2/type1,type2/_endpoint", RequestConverters.endpoint(new String[]{"index1", "index2"}, - new String[]{"type1", "type2"}, "_endpoint")); - assertEquals("/index1,index2/_endpoint/suffix1,suffix2", RequestConverters.endpoint(new String[]{"index1", "index2"}, - "_endpoint", new String[]{"suffix1", "suffix2"})); + assertEquals("/index1,index2", RequestConverters.endpoint(new String[] { "index1", "index2" })); + assertEquals("/index1,index2/_endpoint", RequestConverters.endpoint(new String[] { "index1", "index2" }, "_endpoint")); + assertEquals("/index1,index2/type1,type2/_endpoint", + RequestConverters.endpoint(new String[] { "index1", "index2" }, new String[] { "type1", "type2" }, "_endpoint")); + assertEquals("/index1,index2/_endpoint/suffix1,suffix2", + RequestConverters.endpoint(new String[] { "index1", "index2" }, "_endpoint", new String[] { "suffix1", "suffix2" })); } public void testCreateContentType() { @@ -1555,20 +1613,22 @@ public void testEnforceSameContentType() { XContentType bulkContentType = randomBoolean() ? xContentType : null; - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> - enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR), bulkContentType)); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR), + bulkContentType)); assertEquals("Unsupported content-type found for request with content-type [CBOR], only JSON and SMILE are supported", exception.getMessage()); - exception = expectThrows(IllegalArgumentException.class, () -> - enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML), bulkContentType)); + exception = expectThrows(IllegalArgumentException.class, + () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML), + bulkContentType)); assertEquals("Unsupported content-type found for request with content-type [YAML], only JSON and SMILE are supported", exception.getMessage()); XContentType requestContentType = xContentType == XContentType.JSON ? 
XContentType.SMILE : XContentType.JSON; - exception = expectThrows(IllegalArgumentException.class, () -> - enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType)); + exception = expectThrows(IllegalArgumentException.class, + () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType)); assertEquals("Mismatching content-type found for request with content-type [" + requestContentType + "], " + "previous requests have content-type [" + xContentType + "]", exception.getMessage()); } @@ -1603,11 +1663,10 @@ private static void randomizeFetchSourceContextParams(Consumer setter, Supplier getter, - Map expectedParams) { + Map expectedParams) { if (randomBoolean()) { - setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), - randomBoolean())); + setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); } expectedParams.put("ignore_unavailable", Boolean.toString(getter.get().ignoreUnavailable())); expectedParams.put("allow_no_indices", Boolean.toString(getter.get().allowNoIndices())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java index 2e7ea1650f424..d41b11c68fe44 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java @@ -19,8 +19,14 @@ package org.elasticsearch.client.documentation; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; @@ -31,14 +37,20 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskInfo; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; /** * This class is used to generate the Java Cluster API documentation. 
@@ -177,4 +189,87 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testListTasks() throws IOException { + RestHighLevelClient client = highLevelClient(); + { + // tag::list-tasks-request + ListTasksRequest request = new ListTasksRequest(); + // end::list-tasks-request + + // tag::list-tasks-request-filter + request.setActions("cluster:*"); // <1> + request.setNodes("nodeId1", "nodeId2"); // <2> + request.setParentTaskId(new TaskId("parentTaskId", 42)); // <3> + // end::list-tasks-request-filter + + // tag::list-tasks-request-detailed + request.setDetailed(true); // <1> + // end::list-tasks-request-detailed + + // tag::list-tasks-request-wait-completion + request.setWaitForCompletion(true); // <1> + request.setTimeout(TimeValue.timeValueSeconds(50)); // <2> + request.setTimeout("50s"); // <3> + // end::list-tasks-request-wait-completion + } + + ListTasksRequest request = new ListTasksRequest(); + + // tag::list-tasks-execute + ListTasksResponse response = client.cluster().listTasks(request); + // end::list-tasks-execute + + assertThat(response, notNullValue()); + + // tag::list-tasks-response-tasks + List tasks = response.getTasks(); // <1> + // end::list-tasks-response-tasks + + // tag::list-tasks-response-calc + Map> perNodeTasks = response.getPerNodeTasks(); // <1> + List groups = response.getTaskGroups(); // <2> + // end::list-tasks-response-calc + + // tag::list-tasks-response-failures + List nodeFailures = response.getNodeFailures(); // <1> + List taskFailures = response.getTaskFailures(); // <2> + // end::list-tasks-response-failures + + assertThat(response.getNodeFailures(), equalTo(emptyList())); + assertThat(response.getTaskFailures(), equalTo(emptyList())); + assertThat(response.getTasks().size(), greaterThanOrEqualTo(2)); + } + + public void testListTasksAsync() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + ListTasksRequest request = new ListTasksRequest(); + + // tag::list-tasks-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(ListTasksResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::list-tasks-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::list-tasks-execute-async + client.cluster().listTasksAsync(request, listener); // <1> + // end::list-tasks-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } } diff --git a/docs/java-rest/high-level/cluster/list_tasks.asciidoc b/docs/java-rest/high-level/cluster/list_tasks.asciidoc new file mode 100644 index 0000000000000..1a2117b2e66e6 --- /dev/null +++ b/docs/java-rest/high-level/cluster/list_tasks.asciidoc @@ -0,0 +1,101 @@ +[[java-rest-high-cluster-list-tasks]] +=== List Tasks API + +The List Tasks API allows to get information about the tasks currently executing in the cluster. + +[[java-rest-high-cluster-list-tasks-request]] +==== List Tasks Request + +A `ListTasksRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request] +-------------------------------------------------- +There is no required parameters. By default the client will list all tasks and will not wait +for task completion. 
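
As an editorial aside (not part of the snippets pulled in by the `include-tagged` macros), the following condensed sketch shows the whole flow end to end. It assumes `client` is an already initialized `RestHighLevelClient`, uses placeholder node ids, and relies only on calls that appear elsewhere in this patch (`setActions`, `setNodes`, `setDetailed`, `setWaitForCompletion`, `setTimeout`, `listTasks`, `getTasks`); the individual parameters are described in the sections that follow.

["source","java"]
--------------------------------------------------
ListTasksRequest request = new ListTasksRequest();
request.setActions("cluster:*");                    // only cluster-level actions
request.setNodes("nodeId1", "nodeId2");             // placeholder node ids
request.setDetailed(true);                          // include task descriptions
request.setWaitForCompletion(true);                 // block until matching tasks finish
request.setTimeout(TimeValue.timeValueSeconds(50)); // only meaningful with wait_for_completion

ListTasksResponse response = client.cluster().listTasks(request);
for (TaskInfo task : response.getTasks()) {         // flat list, because group_by=none is always sent
    System.out.println(task.getTaskId() + " " + task.getAction());
}
--------------------------------------------------

Note that `RequestConverters.listTasks` always adds `group_by=none`, so the response body is the flat `tasks` array that `ListTasksResponse.fromXContent` expects; `getTaskGroups()` and `getPerNodeTasks()` rebuild the parent/child and per-node views on the client side.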
+ +==== Parameters + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-filter] +-------------------------------------------------- +<1> Request only cluster-related tasks +<2> Request all tasks running on nodes nodeId1 and nodeId2 +<3> Request only children of a particular task + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-detailed] +-------------------------------------------------- +<1> Should the information include detailed, potentially slow to generate data. Defaults to `false` + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-wait-completion] +-------------------------------------------------- +<1> Should this request wait for all found tasks to complete. Defaults to `false` +<2> Timeout for the request as a `TimeValue`. Applicable only if `setWaitForCompletion` is `true`. +Defaults to 30 seconds +<3> Timeout as a `String` + +[[java-rest-high-cluster-list-tasks-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute] +-------------------------------------------------- + +[[java-rest-high-cluster-list-tasks-async]] +==== Asynchronous Execution + +The asynchronous execution of a cluster update settings requires both the +`ListTasksRequest` instance and an `ActionListener` instance to be +passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-async] +-------------------------------------------------- +<1> The `ListTasksRequest` to execute and the `ActionListener` to use +when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `ListTasksResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of a failure. 
The raised exception is provided as an argument + +[[java-rest-high-cluster-list-tasks-response]] +==== List Tasks Response + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-tasks] +-------------------------------------------------- +<1> List of currently running tasks + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-calc] +-------------------------------------------------- +<1> List of tasks grouped by a node +<2> List of tasks grouped by a parent task + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-failures] +-------------------------------------------------- +<1> List of node failures +<2> List of tasks failures diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index c3988d8b0027e..658d6023caea5 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -98,8 +98,10 @@ include::indices/put_template.asciidoc[] The Java High Level REST Client supports the following Cluster APIs: * <> +* <> include::cluster/put_settings.asciidoc[] +include::cluster/list_tasks.asciidoc[] == Snapshot APIs @@ -107,4 +109,5 @@ The Java High Level REST Client supports the following Snapshot APIs: * <> -include::snapshot/get_repository.asciidoc[] \ No newline at end of file +include::snapshot/get_repository.asciidoc[] +include::snapshot/create_repository.asciidoc[] diff --git a/server/src/main/java/org/elasticsearch/action/TaskOperationFailure.java b/server/src/main/java/org/elasticsearch/action/TaskOperationFailure.java index 885647441d01f..8740c446b068e 100644 --- a/server/src/main/java/org/elasticsearch/action/TaskOperationFailure.java +++ b/server/src/main/java/org/elasticsearch/action/TaskOperationFailure.java @@ -21,17 +21,20 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.RestStatus; import java.io.IOException; import static org.elasticsearch.ExceptionsHelper.detailedMessage; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; /** * Information about task operation failures @@ -39,7 +42,10 @@ * The class is final due to serialization limitations */ public final class TaskOperationFailure implements Writeable, ToXContentFragment { - + private static final String TASK_ID = "task_id"; + private static final String NODE_ID = "node_id"; + private static final String STATUS = "status"; + private static final String REASON = "reason"; private final String nodeId; private final long taskId; @@ -48,6 +54,21 @@ public final class TaskOperationFailure 
implements Writeable, ToXContentFragment private final RestStatus status; + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("task_info", true, constructorObjects -> { + int i = 0; + String nodeId = (String) constructorObjects[i++]; + long taskId = (long) constructorObjects[i++]; + ElasticsearchException reason = (ElasticsearchException) constructorObjects[i]; + return new TaskOperationFailure(nodeId, taskId, reason); + }); + + static { + PARSER.declareString(constructorArg(), new ParseField(NODE_ID)); + PARSER.declareLong(constructorArg(), new ParseField(TASK_ID)); + PARSER.declareObject(constructorArg(), (parser, c) -> ElasticsearchException.fromXContent(parser), new ParseField(REASON)); + } + public TaskOperationFailure(String nodeId, long taskId, Exception e) { this.nodeId = nodeId; this.taskId = taskId; @@ -98,13 +119,17 @@ public String toString() { return "[" + nodeId + "][" + taskId + "] failed, reason [" + getReason() + "]"; } + public static TaskOperationFailure fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("task_id", getTaskId()); - builder.field("node_id", getNodeId()); - builder.field("status", status.name()); + builder.field(TASK_ID, getTaskId()); + builder.field(NODE_ID, getNodeId()); + builder.field(STATUS, status.name()); if (reason != null) { - builder.field("reason"); + builder.field(REASON); builder.startObject(); ElasticsearchException.generateThrowableXContent(builder, params, reason); builder.endObject(); @@ -112,5 +137,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java index 88d8ff4679917..1233b7143ab77 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -19,16 +19,19 @@ package org.elasticsearch.action.admin.cluster.node.tasks.list; -import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; @@ -40,10 +43,16 @@ import java.util.Map; import java.util.stream.Collectors; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Returns the list of tasks currently running on the nodes */ public class ListTasksResponse extends 
BaseTasksResponse implements ToXContentObject { + private static final String TASKS = "tasks"; + private static final String TASK_FAILURES = "task_failures"; + private static final String NODE_FAILURES = "node_failures"; private List tasks; @@ -56,11 +65,31 @@ public ListTasksResponse() { } public ListTasksResponse(List tasks, List taskFailures, - List nodeFailures) { + List nodeFailures) { super(taskFailures, nodeFailures); this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks)); } + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("list_tasks_response", true, + constructingObjects -> { + int i = 0; + @SuppressWarnings("unchecked") + List tasks = (List) constructingObjects[i++]; + @SuppressWarnings("unchecked") + List tasksFailures = (List) constructingObjects[i++]; + @SuppressWarnings("unchecked") + List nodeFailures = (List) constructingObjects[i]; + return new ListTasksResponse(tasks, tasksFailures, nodeFailures); + }); + + static { + PARSER.declareObjectArray(constructorArg(), TaskInfo.PARSER, new ParseField(TASKS)); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), new ParseField(TASK_FAILURES)); + PARSER.declareObjectArray(optionalConstructorArg(), + (parser, c) -> ElasticsearchException.fromXContent(parser), new ParseField(NODE_FAILURES)); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -159,7 +188,7 @@ public XContentBuilder toXContentGroupedByNode(XContentBuilder builder, Params p builder.endObject(); } } - builder.startObject("tasks"); + builder.startObject(TASKS); for(TaskInfo task : entry.getValue()) { builder.startObject(task.getTaskId().toString()); task.toXContent(builder, params); @@ -177,7 +206,7 @@ public XContentBuilder toXContentGroupedByNode(XContentBuilder builder, Params p */ public XContentBuilder toXContentGroupedByParents(XContentBuilder builder, Params params) throws IOException { toXContentCommon(builder, params); - builder.startObject("tasks"); + builder.startObject(TASKS); for (TaskGroup group : getTaskGroups()) { builder.field(group.getTaskInfo().getTaskId().toString()); group.toXContent(builder, params); @@ -191,7 +220,7 @@ public XContentBuilder toXContentGroupedByParents(XContentBuilder builder, Param */ public XContentBuilder toXContentGroupedByNone(XContentBuilder builder, Params params) throws IOException { toXContentCommon(builder, params); - builder.startArray("tasks"); + builder.startArray(TASKS); for (TaskInfo taskInfo : getTasks()) { builder.startObject(); taskInfo.toXContent(builder, params); @@ -204,14 +233,14 @@ public XContentBuilder toXContentGroupedByNone(XContentBuilder builder, Params p @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - toXContentGroupedByParents(builder, params); + toXContentGroupedByNone(builder, params); builder.endObject(); return builder; } private void toXContentCommon(XContentBuilder builder, Params params) throws IOException { if (getTaskFailures() != null && getTaskFailures().size() > 0) { - builder.startArray("task_failures"); + builder.startArray(TASK_FAILURES); for (TaskOperationFailure ex : getTaskFailures()){ builder.startObject(); builder.value(ex); @@ -221,8 +250,8 @@ private void toXContentCommon(XContentBuilder builder, Params params) throws IOE } if (getNodeFailures() != null && getNodeFailures().size() > 0) { - 
builder.startArray("node_failures"); - for (FailedNodeException ex : getNodeFailures()) { + builder.startArray(NODE_FAILURES); + for (ElasticsearchException ex : getNodeFailures()) { builder.startObject(); ex.toXContent(builder, params); builder.endObject(); @@ -231,6 +260,10 @@ private void toXContentCommon(XContentBuilder builder, Params params) throws IOE } } + public static ListTasksResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java index fdbd8e6fe708f..1436410bf2046 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/BaseTasksResponse.java @@ -42,9 +42,9 @@ */ public class BaseTasksResponse extends ActionResponse { private List taskFailures; - private List nodeFailures; + private List nodeFailures; - public BaseTasksResponse(List taskFailures, List nodeFailures) { + public BaseTasksResponse(List taskFailures, List nodeFailures) { this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(taskFailures)); this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(nodeFailures)); } @@ -59,7 +59,7 @@ public List getTaskFailures() { /** * The list of node failures exception. */ - public List getNodeFailures() { + public List getNodeFailures() { return nodeFailures; } @@ -99,7 +99,7 @@ public void writeTo(StreamOutput out) throws IOException { exp.writeTo(out); } out.writeVInt(nodeFailures.size()); - for (FailedNodeException exp : nodeFailures) { + for (ElasticsearchException exp : nodeFailures) { exp.writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java index 8e6447e0e4980..ec4058fea9d7c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java @@ -103,18 +103,17 @@ public RestResponse buildResponse(T response, XContentBuilder builder) throws Ex return new BytesRestResponse(RestStatus.OK, builder); } }; - } else if ("none".equals(groupBy)) { + } else if ("parents".equals(groupBy)) { return new RestBuilderListener(channel) { @Override public RestResponse buildResponse(T response, XContentBuilder builder) throws Exception { builder.startObject(); - response.toXContentGroupedByNone(builder, channel.request()); + response.toXContentGroupedByParents(builder, channel.request()); builder.endObject(); return new BytesRestResponse(RestStatus.OK, builder); } }; - - } else if ("parents".equals(groupBy)) { + } else if ("none".equals(groupBy)) { return new RestToXContentListener<>(channel); } else { throw new IllegalArgumentException("[group_by] must be one of [nodes], [parents] or [none] but was [" + groupBy + "]"); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java index da4909bb3817f..26aabec3e9fc2 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java +++ 
b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Collections; @@ -214,6 +215,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static TaskInfo fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "task_info", true, a -> { int i = 0; diff --git a/server/src/test/java/org/elasticsearch/action/TaskOperationFailureTests.java b/server/src/test/java/org/elasticsearch/action/TaskOperationFailureTests.java new file mode 100644 index 0000000000000..442cb55def5f2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/TaskOperationFailureTests.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class TaskOperationFailureTests extends AbstractXContentTestCase { + + @Override + protected TaskOperationFailure createTestInstance() { + return new TaskOperationFailure(randomAlphaOfLength(5), randomNonNegativeLong(), new IllegalStateException("message")); + } + + @Override + protected TaskOperationFailure doParseInstance(XContentParser parser) throws IOException { + return TaskOperationFailure.fromXContent(parser); + } + + @Override + protected void assertEqualInstances(TaskOperationFailure expectedInstance, TaskOperationFailure newInstance) { + assertNotSame(expectedInstance, newInstance); + assertThat(newInstance.getNodeId(), equalTo(expectedInstance.getNodeId())); + assertThat(newInstance.getTaskId(), equalTo(expectedInstance.getTaskId())); + assertThat(newInstance.getStatus(), equalTo(expectedInstance.getStatus())); + // XContent loses the original exception and wraps it as a message in Elasticsearch exception + assertThat(newInstance.getCause().getMessage(), equalTo("Elasticsearch exception [type=illegal_state_exception, reason=message]")); + // getReason returns Exception class and the message + assertThat(newInstance.getReason(), + equalTo("ElasticsearchException[Elasticsearch exception [type=illegal_state_exception, reason=message]]")); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected boolean assertToXContentEquivalence() { + return false; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index b04205ed01813..4ab54cdd206be 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.admin.cluster.node.tasks; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -716,7 +717,7 @@ public void testTasksWaitForAllTask() throws Exception { .setTimeout(timeValueSeconds(10)).get(); // It should finish quickly and without complaint and list the list tasks themselves - assertThat(response.getNodeFailures(), emptyCollectionOf(FailedNodeException.class)); + assertThat(response.getNodeFailures(), emptyCollectionOf(ElasticsearchException.class)); assertThat(response.getTaskFailures(), emptyCollectionOf(TaskOperationFailure.class)); assertThat(response.getTasks().size(), greaterThanOrEqualTo(1)); } diff --git a/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java b/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java index be0624d6bba83..295ff955e41a5 100644 --- a/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/ListTasksResponseTests.java @@ -19,18 +19,33 @@ package org.elasticsearch.tasks; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; import 
org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractXContentTestCase; +import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; +import java.util.Objects; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; -public class ListTasksResponseTests extends ESTestCase { +public class ListTasksResponseTests extends AbstractXContentTestCase { public void testEmptyToString() { - assertEquals("{\"tasks\":{}}", new ListTasksResponse().toString()); + assertEquals("{\"tasks\":[]}", new ListTasksResponse().toString()); } public void testNonEmptyToString() { @@ -38,8 +53,48 @@ public void testNonEmptyToString() { new TaskId("node1", 1), "dummy-type", "dummy-action", "dummy-description", null, 0, 1, true, new TaskId("node1", 0), Collections.singletonMap("foo", "bar")); ListTasksResponse tasksResponse = new ListTasksResponse(singletonList(info), emptyList(), emptyList()); - assertEquals("{\"tasks\":{\"node1:1\":{\"node\":\"node1\",\"id\":1,\"type\":\"dummy-type\",\"action\":\"dummy-action\"," + assertEquals("{\"tasks\":[{\"node\":\"node1\",\"id\":1,\"type\":\"dummy-type\",\"action\":\"dummy-action\"," + "\"description\":\"dummy-description\",\"start_time_in_millis\":0,\"running_time_in_nanos\":1,\"cancellable\":true," - + "\"parent_task_id\":\"node1:0\",\"headers\":{\"foo\":\"bar\"}}}}", tasksResponse.toString()); + + "\"parent_task_id\":\"node1:0\",\"headers\":{\"foo\":\"bar\"}}]}", tasksResponse.toString()); + } + + @Override + protected ListTasksResponse createTestInstance() { + List tasks = new ArrayList<>(); + for (int i = 0; i < randomInt(10); i++) { + tasks.add(TaskInfoTests.randomTaskInfo()); + } + List taskFailures = new ArrayList<>(); + for (int i = 0; i < randomInt(5); i++) { + taskFailures.add(new TaskOperationFailure( + randomAlphaOfLength(5), randomNonNegativeLong(), new IllegalStateException("message"))); + } + return new ListTasksResponse(tasks, taskFailures, Collections.singletonList(new FailedNodeException("", "message", null))); + } + + @Override + protected ListTasksResponse doParseInstance(XContentParser parser) throws IOException { + return ListTasksResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected void assertEqualInstances(ListTasksResponse expectedInstance, ListTasksResponse newInstance) { + assertNotSame(expectedInstance, newInstance); + assertThat(newInstance.getTasks(), equalTo(expectedInstance.getTasks())); + assertThat(newInstance.getNodeFailures().size(), equalTo(1)); + for (ElasticsearchException failure : newInstance.getNodeFailures()) { + assertThat(failure, notNullValue()); + assertThat(failure.getMessage(), equalTo("Elasticsearch exception [type=failed_node_exception, reason=message]")); + } + } + + @Override + protected boolean assertToXContentEquivalence() { + return false; } } diff --git 
new file mode 100644
index 0000000000000..616ac1053871e
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/tasks/TaskInfoTests.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.tasks;
+
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class TaskInfoTests extends AbstractSerializingTestCase<TaskInfo> {
+
+    @Override
+    protected TaskInfo doParseInstance(XContentParser parser) {
+        return TaskInfo.fromXContent(parser);
+    }
+
+    @Override
+    protected TaskInfo createTestInstance() {
+        return randomTaskInfo();
+    }
+
+    @Override
+    protected Writeable.Reader<TaskInfo> instanceReader() {
+        return TaskInfo::new;
+    }
+
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(Collections.singletonList(
+            new NamedWriteableRegistry.Entry(Task.Status.class, RawTaskStatus.NAME, RawTaskStatus::new)));
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return field -> "status".equals(field) || "headers".equals(field);
+    }
+
+    @Override
+    protected TaskInfo mutateInstance(TaskInfo info) throws IOException {
+        switch (between(0, 9)) {
+            case 0:
+                TaskId taskId = new TaskId(info.getTaskId().getNodeId() + randomAlphaOfLength(5), info.getTaskId().getId());
+                return new TaskInfo(taskId, info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(), info.getHeaders());
+            case 1:
+                return new TaskInfo(info.getTaskId(), info.getType() + randomAlphaOfLength(5), info.getAction(), info.getDescription(),
+                    info.getStatus(), info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 2:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction() + randomAlphaOfLength(5), info.getDescription(),
+                    info.getStatus(), info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 3:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription() + randomAlphaOfLength(5),
+                    info.getStatus(), info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 4:
+                Task.Status newStatus = randomValueOtherThan(info.getStatus(), TaskInfoTests::randomRawTaskStatus);
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), newStatus,
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(), info.getHeaders());
+            case 5:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime() + between(1, 100), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 6:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos() + between(1, 100), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 7:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable() == false, info.getParentTaskId(),
+                    info.getHeaders());
+            case 8:
+                TaskId parentId = new TaskId(info.getParentTaskId().getNodeId() + randomAlphaOfLength(5), info.getParentTaskId().getId());
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), parentId, info.getHeaders());
+            case 9:
+                Map<String, String> headers = info.getHeaders();
+                if (headers == null) {
+                    headers = new HashMap<>(1);
+                } else {
+                    headers = new HashMap<>(info.getHeaders());
+                }
+                headers.put(randomAlphaOfLength(15), randomAlphaOfLength(15));
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(), headers);
+            default:
+                throw new IllegalStateException();
+        }
+    }
+
+    static TaskInfo randomTaskInfo() {
+        TaskId taskId = randomTaskId();
+        String type = randomAlphaOfLength(5);
+        String action = randomAlphaOfLength(5);
+        Task.Status status = randomBoolean() ? randomRawTaskStatus() : null;
+        String description = randomBoolean() ? randomAlphaOfLength(5) : null;
+        long startTime = randomLong();
+        long runningTimeNanos = randomLong();
+        boolean cancellable = randomBoolean();
+        TaskId parentTaskId = randomBoolean() ? TaskId.EMPTY_TASK_ID : randomTaskId();
+        Map<String, String> headers = randomBoolean() ?
+            Collections.emptyMap() :
+            Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5));
+        return new TaskInfo(taskId, type, action, description, status, startTime, runningTimeNanos, cancellable, parentTaskId, headers);
+    }
+
+    private static TaskId randomTaskId() {
+        return new TaskId(randomAlphaOfLength(5), randomLong());
+    }
+
+    private static RawTaskStatus randomRawTaskStatus() {
+        try (XContentBuilder builder = XContentBuilder.builder(Requests.INDEX_CONTENT_TYPE.xContent())) {
+            builder.startObject();
+            int fields = between(0, 10);
+            for (int f = 0; f < fields; f++) {
+                builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5));
+            }
+            builder.endObject();
+            return new RawTaskStatus(BytesReference.bytes(builder));
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java
index 7a481100f1372..71916c0c94435 100644
--- a/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java
+++ b/server/src/test/java/org/elasticsearch/tasks/TaskResultTests.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.tasks;
 
-import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -37,6 +35,8 @@
 import java.util.Map;
 import java.util.TreeMap;
 
+import static org.elasticsearch.tasks.TaskInfoTests.randomTaskInfo;
+
 /**
  * Round trip tests for {@link TaskResult} and those classes that it includes like {@link TaskInfo} and {@link RawTaskStatus}.
  */
@@ -125,37 +125,6 @@ private static TaskResult randomTaskResult() throws IOException {
         }
     }
 
-    private static TaskInfo randomTaskInfo() throws IOException {
-        TaskId taskId = randomTaskId();
-        String type = randomAlphaOfLength(5);
-        String action = randomAlphaOfLength(5);
-        Task.Status status = randomBoolean() ? randomRawTaskStatus() : null;
-        String description = randomBoolean() ? randomAlphaOfLength(5) : null;
-        long startTime = randomLong();
-        long runningTimeNanos = randomLong();
-        boolean cancellable = randomBoolean();
-        TaskId parentTaskId = randomBoolean() ? TaskId.EMPTY_TASK_ID : randomTaskId();
-        Map<String, String> headers =
-                randomBoolean() ? Collections.emptyMap() : Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5));
-        return new TaskInfo(taskId, type, action, description, status, startTime, runningTimeNanos, cancellable, parentTaskId, headers);
-    }
-
-    private static TaskId randomTaskId() {
-        return new TaskId(randomAlphaOfLength(5), randomLong());
-    }
-
-    private static RawTaskStatus randomRawTaskStatus() throws IOException {
-        try (XContentBuilder builder = XContentBuilder.builder(Requests.INDEX_CONTENT_TYPE.xContent())) {
-            builder.startObject();
-            int fields = between(0, 10);
-            for (int f = 0; f < fields; f++) {
-                builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5));
-            }
-            builder.endObject();
-            return new RawTaskStatus(BytesReference.bytes(builder));
-        }
-    }
-
     private static ToXContent randomTaskResponse() {
         Map<String, String> result = new TreeMap<>();
         int fields = between(0, 10);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 4717fc7c1ba31..edc762632b428 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -659,20 +659,20 @@ public static String randomRealisticUnicodeOfCodepointLength(int codePoints) {
         return RandomizedTest.randomRealisticUnicodeOfCodepointLength(codePoints);
     }
 
-    public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull, boolean allowEmpty) {
+    public static String[] generateRandomStringArray(int maxArraySize, int stringSize, boolean allowNull, boolean allowEmpty) {
         if (allowNull && random().nextBoolean()) {
             return null;
         }
         int arraySize = randomIntBetween(allowEmpty ? 0 : 1, maxArraySize);
         String[] array = new String[arraySize];
         for (int i = 0; i < arraySize; i++) {
-            array[i] = RandomStrings.randomAsciiOfLength(random(), maxStringSize);
+            array[i] = RandomStrings.randomAsciiOfLength(random(), stringSize);
         }
         return array;
     }
 
-    public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull) {
-        return generateRandomStringArray(maxArraySize, maxStringSize, allowNull, true);
+    public static String[] generateRandomStringArray(int maxArraySize, int stringSize, boolean allowNull) {
+        return generateRandomStringArray(maxArraySize, stringSize, allowNull, true);
     }
 
     private static final String[] TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m", "micros", "nanos"};
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
index b53f61e35fcf3..37e41854f7b8b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.ml.action;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.tasks.BaseTasksRequest;
 import org.elasticsearch.action.support.tasks.BaseTasksResponse;
@@ -297,7 +297,7 @@ public Response(QueryPage<JobStats> jobsStats) {
             this.jobsStats = jobsStats;
         }
 
-        public Response(List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException> nodeFailures,
+        public Response(List<TaskOperationFailure> taskFailures, List<? extends ElasticsearchException> nodeFailures,
                         QueryPage<JobStats> jobsStats) {
             super(taskFailures, nodeFailures);
             this.jobsStats = jobsStats;