Reindex remove outer level size
This commit finalizes the work to rename `size` to `max_docs` in
reindex and update/delete by query. `size` is no longer supported in the URL
or at the outer level of the request body for these three APIs.

Continuation of elastic#41894

Closes elastic#24344
henningandersen committed Jun 19, 2019
1 parent 680d6ed commit 86e98c1
Showing 17 changed files with 64 additions and 325 deletions.
16 changes: 15 additions & 1 deletion docs/reference/migration/migrate_8_0/reindex.asciidoc
@@ -12,4 +12,18 @@ Instead, please specify the index-name without any encoding.
[float]
==== Removal of types

The `/{index}/{type}/_delete_by_query` and `/{index}/{type}/_update_by_query` REST endpoints have been removed in favour of `/{index}/_delete_by_query` and `/{index}/_update_by_query`; since indexes no longer contain types, these typed endpoints are obsolete.

[float]
==== Removal of size parameter

Previously, a `_reindex` request had two different size specifications in the body:

- At the outer level, determining the maximum number of documents to process.
- Inside the `source` element, determining the scroll/batch size.

The outer level `size` parameter has now been renamed to `max_docs` to
avoid confusion and clarify its semantics.

Similarly, the `size` parameter has been renamed to `max_docs` for
`_delete_by_query` and `_update_by_query` to keep the three interfaces consistent.
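
To make the rename concrete in client code, here is a minimal, hypothetical sketch using the 7.3+ Java high-level REST client (the class name, index names, and local cluster address are illustrative assumptions, not part of this commit): the outer-level document limit is expressed through `setMaxDocs`, while the per-batch size stays on the source.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.ReindexRequest;

public class MaxDocsExample {
    public static void main(String[] args) throws Exception {
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            ReindexRequest request = new ReindexRequest();
            request.setSourceIndices("twitter");   // hypothetical source index
            request.setDestIndex("new_twitter");   // hypothetical destination index
            request.setMaxDocs(10);                // limit on processed documents (was the outer-level "size")
            request.setSourceBatchSize(100);       // scroll/batch size, untouched by the rename
            BulkByScrollResponse response = client.reindex(request, RequestOptions.DEFAULT);
            System.out.println("created: " + response.getCreated());
        }
    }
}
```

The same `setMaxDocs` setter is also exposed on `UpdateByQueryRequest` and `DeleteByQueryRequest`, which is the consistency across the three interfaces that the note above refers to.
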
@@ -23,7 +23,6 @@
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -53,7 +52,7 @@ protected void parseInternalRequest(Request internal, RestRequest restRequest,
SearchRequest searchRequest = internal.getSearchRequest();

try (XContentParser parser = extractRequestSpecificFields(restRequest, bodyConsumers)) {
RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, size -> setMaxDocsFromSearchSize(internal, size));
RestSearchAction.parseSearchRequest(searchRequest, restRequest, parser, size -> failOnSizeSpecified());
}

searchRequest.source().size(restRequest.paramAsInt("scroll_size", searchRequest.source().size()));
@@ -96,8 +95,7 @@ private XContentParser extractRequestSpecificFields(RestRequest restRequest,
}
}

private void setMaxDocsFromSearchSize(Request request, int size) {
LoggingDeprecationHandler.INSTANCE.usedDeprecatedName("size", "max_docs");
setMaxDocsValidateIdentical(request, size);
protected static void failOnSizeSpecified() {
throw new IllegalArgumentException("invalid parameter [size], use [max_docs] instead");
}
}
@@ -59,6 +59,7 @@ protected DeleteByQueryRequest buildRequest(RestRequest request) throws IOExcept
Map<String, Consumer<Object>> consumers = new HashMap<>();
consumers.put("conflicts", o -> internal.setConflicts((String) o));
consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue()));
consumers.put("size", s -> failOnSizeSpecified());

parseInternalRequest(internal, request, consumers);

@@ -67,6 +67,7 @@ protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOExcept
consumers.put("conflicts", o -> internal.setConflicts((String) o));
consumers.put("script", o -> internal.setScript(parseScript(o)));
consumers.put("max_docs", s -> setMaxDocsValidateIdentical(internal, ((Number) s).intValue()));
consumers.put("size", s -> failOnSizeSpecified());

parseInternalRequest(internal, request, consumers);

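
For orientation, the two handlers above register a consumer per recognised outer-level body field and map the removed `size` field to a consumer that fails immediately. The following is a simplified, self-contained sketch of that dispatch pattern under assumed names (it is not the actual handler code):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

public class SizeRejectionSketch {
    public static void main(String[] args) {
        // Stand-in for the request object's max_docs field.
        int[] maxDocs = {-1};

        // Each recognised outer-level body field maps to a consumer.
        Map<String, Consumer<Object>> consumers = new HashMap<>();
        consumers.put("max_docs", v -> maxDocs[0] = ((Number) v).intValue());
        consumers.put("size", v -> {
            throw new IllegalArgumentException("invalid parameter [size], use [max_docs] instead");
        });

        // Dispatch a parsed body: "max_docs" is accepted; a "size" key would throw.
        Map<String, Object> body = Map.of("max_docs", 5);
        body.forEach((field, value) -> {
            Consumer<Object> consumer = consumers.get(field);
            if (consumer != null) {
                consumer.accept(value);
            }
        });
        System.out.println("max_docs = " + maxDocs[0]); // prints: max_docs = 5
    }
}
```
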
@@ -107,11 +107,7 @@ private void randomRequest(AbstractBulkByScrollRequest<?> request) {
request.getSearchRequest().indices("test");
request.getSearchRequest().source().size(between(1, 1000));
if (randomBoolean()) {
if (randomBoolean()) {
request.setMaxDocs(between(1, Integer.MAX_VALUE));
} else {
request.setSize(between(1, Integer.MAX_VALUE));
}
request.setMaxDocs(between(1, Integer.MAX_VALUE));
}
request.setAbortOnVersionConflict(random().nextBoolean());
request.setRefresh(rarely());
@@ -279,52 +279,6 @@

- match: {count: 1}

---
"Limit by size":
- skip:
version: " - 7.2.99"
reason: "deprecation warnings only emitted on 7.3+"
features: warnings

- do:
index:
index: twitter
id: 1
body: { "user": "kimchy" }
- do:
index:
index: twitter
id: 2
body: { "user": "kimchy" }
- do:
indices.refresh: {}

- do:
warnings:
- Deprecated field [size] used, expected [max_docs] instead
delete_by_query:
index: twitter
size: 1
body:
query:
match_all: {}

- match: {deleted: 1}
- match: {version_conflicts: 0}
- match: {batches: 1}
- match: {failures: []}
- match: {throttled_millis: 0}
- gte: { took: 0 }

- do:
indices.refresh: {}

- do:
count:
index: twitter

- match: {count: 1}

---
"Limit by size pre 7.3":
- skip:
@@ -31,18 +31,23 @@
match_all: {}

---
"invalid size fails":
"specifying size fails":
- skip:
version: " - 7.99.99"
reason: "size supported until 8"

- do:
index:
index: test
id: 1
body: { "text": "test" }
index: twitter
id: 1
body: { "user": "kimchy" }

- do:
catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/
catch: /invalid parameter \[size\], use \[max_docs\] instead/
delete_by_query:
index: test
size: -4
index: twitter
body:
size: 1
query:
match_all: {}

@@ -66,27 +71,6 @@
query:
match_all: {}

---
"both max_docs and size fails":
- skip:
version: " - 7.2.99"
reason: "max_docs introduced in 7.3.0"

- do:
index:
index: test
id: 1
body: { "text": "test" }
- do:
catch: /\[max_docs\] set to two different values \[4\] and \[5\]/
delete_by_query:
index: test
size: 4
max_docs: 5
body:
query:
match_all: {}

---
"invalid scroll_size fails":
- do:
@@ -95,21 +95,26 @@
conflicts: cat

---
"invalid size fails":
"specifying size fails":
- skip:
version: " - 7.99.99"
reason: "size supported until 8"

- do:
index:
index: test
id: 1
body: { "text": "test" }
index: test
id: 1
body: { "text": "test" }

- do:
catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/
catch: /invalid parameter \[size\], use \[max_docs\] instead/
reindex:
body:
source:
index: test
dest:
index: dest
size: -4
size: 1

---
"invalid max_docs in body fails":
@@ -153,28 +158,6 @@
dest:
index: dest

---
"inconsistent max_docs and size fails":
- skip:
version: " - 7.2.99"
reason: "max_docs introduced in 7.3.0"

- do:
index:
index: test
id: 1
body: { "text": "test" }
- do:
catch: /\[max_docs\] set to two different values \[4\] and \[5\]/
reindex:
body:
source:
index: test
dest:
index: dest
size: 4
max_docs: 5

---
"inconsistent max_docs in body and max_docs in URL fails":
- skip:
@@ -31,52 +31,6 @@
index: target
- match: { hits.total: 1 }

---
"Sorting and size combined":
- skip:
version: " - 7.2.99"
reason: "deprecation warnings only emitted on 7.3+"
features: warnings

- do:
index:
index: test
id: 1
body: { "order": 1 }
- do:
index:
index: test
id: 2
body: { "order": 2 }
- do:
indices.refresh: {}

- do:
warnings:
- Deprecated field [size] used, expected [max_docs] instead
reindex:
refresh: true
body:
size: 1
source:
index: test
sort: order
dest:
index: target

- do:
search:
rest_total_hits_as_int: true
index: target
- match: { hits.total: 1 }

- do:
search:
rest_total_hits_as_int: true
index: target
q: order:1
- match: { hits.total: 1 }

---
"Sorting and size combined pre 7.3":
- skip:
@@ -217,72 +217,6 @@
metric: search
- match: {indices.source.total.search.open_contexts: 0}

---
"Reindex from remote with size":
- skip:
version: "7.3.0 - "
reason: "7.3 should use max_docs or get deprecation warning"

- do:
index:
index: source
id: 1
body: { "text": "test" }
refresh: true
- do:
index:
index: source
id: 2
body: { "text": "test" }
refresh: true

# Fetch the http host. We use the host of the master because we know there will always be a master.
- do:
cluster.state: {}
- set: { master_node: master }
- do:
nodes.info:
metric: [ http ]
- is_true: nodes.$master.http.publish_address
- set: {nodes.$master.http.publish_address: host}
- do:
reindex:
refresh: true
body:
size: 1
source:
remote:
host: http://${host}
index: source
dest:
index: dest
- match: {created: 1}
- match: {updated: 0}
- match: {version_conflicts: 0}
- match: {batches: 1}
- match: {failures: []}
- match: {throttled_millis: 0}
- gte: { took: 0 }
- is_false: task
- is_false: deleted

- do:
search:
rest_total_hits_as_int: true
index: dest
body:
query:
match:
text: test
- match: {hits.total: 1}

# Make sure reindex closed all the scroll contexts
- do:
indices.stats:
index: source
metric: search
- match: {indices.source.total.search.open_contexts: 0}

---
"Reindex from remote with max_docs":
- skip: