From 655103de589a36aabb5253942c83faa7a84d219e Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 16 Jan 2019 13:17:10 -0500 Subject: [PATCH 01/11] Increase timeout for testAddNewReplicas We flush quite often in testAddNewReplicas to create the safe index commit with gaps in sequence numbers. This test is failing recently because CI is too slow to complete 5 small flushes in 10 seconds. This commit increases timeout for this test and also ensures to always terminate the background indexing. The latter is to eliminate unrelated failures if this test fails again. Closes #37183 --- .../RecoveryDuringReplicationTests.java | 46 ++++++++++++------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 80107e6f1e83b..53db2b7dd8a9e 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; @@ -66,6 +67,7 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -691,26 +693,38 @@ public void testTransferMaxSeenAutoIdTimestampOnResync() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37183") public void testAddNewReplicas() throws Exception { - try (ReplicationGroup shards = createGroup(between(0, 1))) { + AtomicBoolean stopped = new AtomicBoolean(); + List threads = new ArrayList<>(); + Runnable stopIndexing = () -> { + try { + stopped.set(true); + for (Thread thread : threads) { + thread.join(); + } + } catch (Exception e) { + throw new AssertionError(e); + } + }; + try (ReplicationGroup shards = createGroup(between(0, 1)); + Releasable ignored = stopIndexing::run) { shards.startAll(); - Thread[] threads = new Thread[between(1, 3)]; - AtomicBoolean isStopped = new AtomicBoolean(); boolean appendOnly = randomBoolean(); AtomicInteger docId = new AtomicInteger(); - for (int i = 0; i < threads.length; i++) { - threads[i] = new Thread(() -> { - while (isStopped.get() == false) { + int numThreads = between(1, 3); + for (int i = 0; i < numThreads; i++) { + Thread thread = new Thread(() -> { + while (stopped.get() == false) { try { + int nextId = docId.incrementAndGet(); if (appendOnly) { - String id = randomBoolean() ? Integer.toString(docId.incrementAndGet()) : null; + String id = randomBoolean() ? Integer.toString(nextId) : null; shards.index(new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON)); } else if (frequently()) { - String id = Integer.toString(frequently() ? docId.incrementAndGet() : between(0, 10)); + String id = Integer.toString(frequently() ? 
nextId : between(0, nextId)); shards.index(new IndexRequest(index.getName(), "type", id).source("{}", XContentType.JSON)); } else { - String id = Integer.toString(between(0, docId.get())); + String id = Integer.toString(between(0, nextId)); shards.delete(new DeleteRequest(index.getName(), "type", id)); } if (randomInt(100) < 10) { @@ -721,17 +735,15 @@ public void testAddNewReplicas() throws Exception { } } }); - threads[i].start(); + threads.add(thread); + thread.start(); } - assertBusy(() -> assertThat(docId.get(), greaterThanOrEqualTo(50))); + assertBusy(() -> assertThat(docId.get(), greaterThanOrEqualTo(50)), 60, TimeUnit.SECONDS); // we flush quite often shards.getPrimary().sync(); IndexShard newReplica = shards.addReplica(); shards.recoverReplica(newReplica); - assertBusy(() -> assertThat(docId.get(), greaterThanOrEqualTo(100))); - isStopped.set(true); - for (Thread thread : threads) { - thread.join(); - } + assertBusy(() -> assertThat(docId.get(), greaterThanOrEqualTo(100)), 60, TimeUnit.SECONDS); // we flush quite often + stopIndexing.run(); assertBusy(() -> assertThat(getDocIdAndSeqNos(newReplica), equalTo(getDocIdAndSeqNos(shards.getPrimary())))); } } From 18a3e48a4a7aaff948dc209f7c0798b703d439cc Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 16 Jan 2019 17:19:12 -0500 Subject: [PATCH 02/11] Change file descriptor limit to 65535 (#37537) Some systems default to a nofile ulimit of 65535. To reduce the pain of deploying Elasticsearch to such systems, this commit lowers the required limit from 65536 to 65535. --- distribution/packages/src/common/env/elasticsearch | 2 +- .../packages/src/common/systemd/elasticsearch.service | 2 +- distribution/packages/src/deb/init.d/elasticsearch | 2 +- distribution/packages/src/rpm/init.d/elasticsearch | 2 +- docs/reference/getting-started.asciidoc | 2 +- docs/reference/setup/install/docker.asciidoc | 2 +- docs/reference/setup/install/sysconfig-file.asciidoc | 2 +- docs/reference/setup/sysconfig/configuring.asciidoc | 4 ++-- docs/reference/setup/sysconfig/file-descriptors.asciidoc | 6 +++--- .../src/test/resources/packaging/tests/60_systemd.bats | 2 +- .../java/org/elasticsearch/bootstrap/BootstrapChecks.java | 2 +- .../org/elasticsearch/bootstrap/BootstrapChecksTests.java | 2 +- 12 files changed, 15 insertions(+), 15 deletions(-) diff --git a/distribution/packages/src/common/env/elasticsearch b/distribution/packages/src/common/env/elasticsearch index 995a264e51380..76d9473b0fb5a 100644 --- a/distribution/packages/src/common/env/elasticsearch +++ b/distribution/packages/src/common/env/elasticsearch @@ -36,7 +36,7 @@ ES_STARTUP_SLEEP_TIME=5 # Specifies the maximum file descriptor number that can be opened by this process # When using Systemd, this setting is ignored and the LimitNOFILE defined in # /usr/lib/systemd/system/elasticsearch.service takes precedence -#MAX_OPEN_FILES=65536 +#MAX_OPEN_FILES=65535 # The maximum number of bytes of memory that may be locked into RAM # Set to "unlimited" if you use the 'bootstrap.memory_lock: true' option diff --git a/distribution/packages/src/common/systemd/elasticsearch.service b/distribution/packages/src/common/systemd/elasticsearch.service index a4d67d8830a56..a03f1befdb748 100644 --- a/distribution/packages/src/common/systemd/elasticsearch.service +++ b/distribution/packages/src/common/systemd/elasticsearch.service @@ -29,7 +29,7 @@ StandardOutput=journal StandardError=inherit # Specifies the maximum file descriptor number that can be opened by this process -LimitNOFILE=65536 +LimitNOFILE=65535 # 
Specifies the maximum number of processes LimitNPROC=4096 diff --git a/distribution/packages/src/deb/init.d/elasticsearch b/distribution/packages/src/deb/init.d/elasticsearch index 5ff7a5e74c62f..1e503fa0cfad0 100755 --- a/distribution/packages/src/deb/init.d/elasticsearch +++ b/distribution/packages/src/deb/init.d/elasticsearch @@ -39,7 +39,7 @@ ES_HOME=/usr/share/$NAME #ES_JAVA_OPTS= # Maximum number of open files -MAX_OPEN_FILES=65536 +MAX_OPEN_FILES=65535 # Maximum amount of locked memory #MAX_LOCKED_MEMORY= diff --git a/distribution/packages/src/rpm/init.d/elasticsearch b/distribution/packages/src/rpm/init.d/elasticsearch index 8f91db312738d..bdaf8fbec37de 100644 --- a/distribution/packages/src/rpm/init.d/elasticsearch +++ b/distribution/packages/src/rpm/init.d/elasticsearch @@ -33,7 +33,7 @@ fi # Sets the default values for elasticsearch variables used in this script ES_HOME="/usr/share/elasticsearch" -MAX_OPEN_FILES=65536 +MAX_OPEN_FILES=65535 MAX_MAP_COUNT=262144 ES_PATH_CONF="${path.conf}" diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index ff1bdd6b7a619..b79dd5c36c244 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -248,7 +248,7 @@ If everything goes well with installation, you should see a bunch of messages th [2018-09-13T12:20:05,006][INFO ][o.e.n.Node ] [localhost.localdomain] initialized [2018-09-13T12:20:05,007][INFO ][o.e.n.Node ] [localhost.localdomain] starting ... [2018-09-13T12:20:05,202][INFO ][o.e.t.TransportService ] [localhost.localdomain] publish_address {127.0.0.1:9300}, bound_addresses {[::1]:9300}, {127.0.0.1:9300} -[2018-09-13T12:20:05,221][WARN ][o.e.b.BootstrapChecks ] [localhost.localdomain] max file descriptors [4096] for elasticsearch process is too low, increase to at least [65536] +[2018-09-13T12:20:05,221][WARN ][o.e.b.BootstrapChecks ] [localhost.localdomain] max file descriptors [4096] for elasticsearch process is too low, increase to at least [65535] [2018-09-13T12:20:05,221][WARN ][o.e.b.BootstrapChecks ] [localhost.localdomain] max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144] [2018-09-13T12:20:08,355][INFO ][o.e.c.s.MasterService ] [localhost.localdomain] zen-disco-elected-as-master ([0] nodes joined)[, ], reason: master node changed {previous [], current [{localhost.localdomain}{B0aEHNagTiWx7SYj-l4NTw}{hzsQz6CVQMCTpMCVLM4IHg}{127.0.0.1}{127.0.0.1:9300}{testattr=test}]} [2018-09-13T12:20:08,360][INFO ][o.e.c.s.ClusterApplierService] [localhost.localdomain] master node changed {previous [], current [{localhost.localdomain}{B0aEHNagTiWx7SYj-l4NTw}{hzsQz6CVQMCTpMCVLM4IHg}{127.0.0.1}{127.0.0.1:9300}{testattr=test}]}, reason: apply cluster state (from master [master {localhost.localdomain}{B0aEHNagTiWx7SYj-l4NTw}{hzsQz6CVQMCTpMCVLM4IHg}{127.0.0.1}{127.0.0.1:9300}{testattr=test} committed version [1] source [zen-disco-elected-as-master ([0] nodes joined)[, ]]]) diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 267ea14420921..92710f4429f8a 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -338,7 +338,7 @@ needed, adjust them in the Daemon, or override them per container, for example using `docker run`: + -- - --ulimit nofile=65536:65536 + --ulimit nofile=65535:65535 NOTE: One way of checking the Docker daemon defaults for the aforementioned ulimits is by running: diff --git 
a/docs/reference/setup/install/sysconfig-file.asciidoc b/docs/reference/setup/install/sysconfig-file.asciidoc index d2df5cd4e0c5e..0532132cdadb7 100644 --- a/docs/reference/setup/install/sysconfig-file.asciidoc +++ b/docs/reference/setup/install/sysconfig-file.asciidoc @@ -5,7 +5,7 @@ `MAX_OPEN_FILES`:: - Maximum number of open files, defaults to `65536`. + Maximum number of open files, defaults to `65535`. `MAX_LOCKED_MEMORY`:: diff --git a/docs/reference/setup/sysconfig/configuring.asciidoc b/docs/reference/setup/sysconfig/configuring.asciidoc index 8390991aeb195..c951388537094 100644 --- a/docs/reference/setup/sysconfig/configuring.asciidoc +++ b/docs/reference/setup/sysconfig/configuring.asciidoc @@ -25,7 +25,7 @@ open file handles (`ulimit -n`) to 65,536, you can do the following: [source,sh] -------------------------------- sudo su <1> -ulimit -n 65536 <2> +ulimit -n 65535 <2> su elasticsearch <3> -------------------------------- <1> Become `root`. @@ -46,7 +46,7 @@ the `limits.conf` file: [source,sh] -------------------------------- -elasticsearch - nofile 65536 +elasticsearch - nofile 65535 -------------------------------- This change will only take effect the next time the `elasticsearch` user opens diff --git a/docs/reference/setup/sysconfig/file-descriptors.asciidoc b/docs/reference/setup/sysconfig/file-descriptors.asciidoc index 17e7884be0d33..4c22a0021ff53 100644 --- a/docs/reference/setup/sysconfig/file-descriptors.asciidoc +++ b/docs/reference/setup/sysconfig/file-descriptors.asciidoc @@ -12,15 +12,15 @@ file descriptors can be disastrous and will most probably lead to data loss. Make sure to increase the limit on the number of open files descriptors for the user running Elasticsearch to 65,536 or higher. -For the `.zip` and `.tar.gz` packages, set <> as -root before starting Elasticsearch, or set `nofile` to `65536` in +For the `.zip` and `.tar.gz` packages, set <> as +root before starting Elasticsearch, or set `nofile` to `65535` in <>. On macOS, you must also pass the JVM option `-XX:-MaxFDLimit` to Elasticsearch in order for it to make use of the higher file descriptor limit. RPM and Debian packages already default the maximum number of file -descriptors to 65536 and do not require further configuration. +descriptors to 65535 and do not require further configuration. 
You can check the `max_file_descriptors` configured for each node using the <> API, with: diff --git a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats index db062eb337e74..3cf495939aff9 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats @@ -234,7 +234,7 @@ setup() { local max_processes=$(cat /proc/$pid/limits | grep "Max processes" | awk '{ print $3 }') [ "$max_processes" == "4096" ] local max_open_files=$(cat /proc/$pid/limits | grep "Max open files" | awk '{ print $4 }') - [ "$max_open_files" == "65536" ] + [ "$max_open_files" == "65535" ] local max_address_space=$(cat /proc/$pid/limits | grep "Max address space" | awk '{ print $4 }') [ "$max_address_space" == "unlimited" ] systemctl stop elasticsearch.service diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index e77568105db7d..ebda7df66dc09 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -267,7 +267,7 @@ static class FileDescriptorCheck implements BootstrapCheck { private final int limit; FileDescriptorCheck() { - this(1 << 16); + this(65535); } protected FileDescriptorCheck(final int limit) { diff --git a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java index be8387be87cfb..33a3aea37b4c5 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java @@ -188,7 +188,7 @@ long getMaxHeapSize() { public void testFileDescriptorLimits() throws NodeValidationException { final boolean osX = randomBoolean(); // simulates OS X versus non-OS X - final int limit = osX ? 10240 : 1 << 16; + final int limit = osX ? 10240 : 65535; final AtomicLong maxFileDescriptorCount = new AtomicLong(randomIntBetween(1, limit - 1)); final BootstrapChecks.FileDescriptorCheck check; if (osX) { From b6e5ccaf8a2304892405712cf832d23efdce0940 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Thu, 17 Jan 2019 09:10:09 +0100 Subject: [PATCH 03/11] Remove the AbstracLifecycleComponent constructor with Settings (#37523) Adding the migration guide and removing the deprecated in 6.x constructor relates #35560 relates #34488 --- docs/reference/migration/migrate_7_0/java.asciidoc | 8 +++++++- .../common/component/AbstractLifecycleComponent.java | 6 ------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index e48a4cf1b45c3..f34b1c6ca9906 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -38,4 +38,10 @@ because `Settings` is no longer needed. ==== Deprecated method `Client#termVector` removed The client method `termVector`, deprecated in 2.0, has been removed. The method -`termVectors` (plural) should be used instead. \ No newline at end of file +`termVectors` (plural) should be used instead. + +[float] +==== Deprecated constructor `AbstractLifecycleComponent(Settings settings)` removed + +The constructor `AbstractLifecycleComponent(Settings settings)`, deprecated in 6.7 +has been removed. 
The parameterless constructor should be used instead. diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index 2caaa43fbcd05..772d2d89cf5be 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -21,7 +21,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.util.List; @@ -36,11 +35,6 @@ public abstract class AbstractLifecycleComponent implements LifecycleComponent { protected AbstractLifecycleComponent() {} - @Deprecated - protected AbstractLifecycleComponent(Settings settings) { - // TODO drop settings from ctor - } - @Override public Lifecycle.State lifecycleState() { return this.lifecycle.state(); From 1686c32ba9825d7998336b942759afaa64fce390 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Thu, 17 Jan 2019 10:17:58 +0200 Subject: [PATCH 04/11] SQL: Rename SQL type DATE to DATETIME (#37395) * SQL: Rename SQL data type DATE to DATETIME SQL data type DATE has only the date part (e.g.: 2019-01-14) without any time information. Previously the SQL type DATE was referring to the ES DATE which contains also the time part along with TZ information. To conform with SQL data types the data type `DATE` is renamed to `DATETIME`, since it includes also the time, as a new runtime SQL `DATE` data type will be introduced down the road, which only contains the date part and meets the SQL standard. Closes: #36440 * Address comments --- docs/reference/sql/endpoints/rest.asciidoc | 6 +- .../sql/functions/date-time.asciidoc | 118 +++++++++--------- .../reference/sql/functions/grouping.asciidoc | 4 +- .../sql/functions/type-conversion.asciidoc | 2 +- .../elasticsearch/xpack/sql/jdbc/EsType.java | 4 +- .../xpack/sql/jdbc/JdbcPreparedStatement.java | 4 +- .../xpack/sql/jdbc/JdbcResultSet.java | 2 +- .../xpack/sql/jdbc/TypeConverter.java | 10 +- .../xpack/sql/jdbc/TypeUtils.java | 20 +-- .../sql/jdbc/JdbcPreparedStatementTests.java | 20 +-- .../xpack/sql/jdbc/TypeConverterTests.java | 4 +- .../xpack/sql/qa/SqlProtocolTestCase.java | 11 +- .../xpack/sql/qa/jdbc/ResultSetTestCase.java | 30 ++--- .../sql/qa/src/main/resources/agg.csv-spec | 8 +- .../sql/qa/src/main/resources/agg.sql-spec | 24 ++-- .../sql/qa/src/main/resources/alias.csv-spec | 16 +-- .../qa/src/main/resources/command.csv-spec | 32 ++--- .../main/resources/datetime-interval.csv-spec | 6 +- .../qa/src/main/resources/datetime.csv-spec | 4 +- .../qa/src/main/resources/datetime.sql-spec | 6 +- .../sql/qa/src/main/resources/docs.csv-spec | 54 ++++---- .../sql/qa/src/main/resources/nested.csv-spec | 8 +- .../sql/qa/src/main/resources/null.csv-spec | 2 +- .../setup_mock_metadata_get_columns.sql | 2 +- .../sql/analysis/index/IndexResolver.java | 4 +- .../search/extractor/FieldHitExtractor.java | 2 +- .../xpack/sql/expression/Expressions.java | 24 ++-- .../function/grouping/Histogram.java | 4 +- .../scalar/datetime/CurrentDateTime.java | 2 +- .../expression/gen/script/ScriptWeaver.java | 6 +- .../xpack/sql/parser/ExpressionBuilder.java | 10 +- .../xpack/sql/planner/QueryFolder.java | 6 +- .../xpack/sql/planner/QueryTranslator.java | 2 +- .../querydsl/agg/GroupByDateHistogram.java | 2 +- .../xpack/sql/querydsl/agg/GroupByKey.java | 4 +- 
.../querydsl/container/QueryContainer.java | 4 +- .../querydsl/container/SearchHitFieldRef.java | 2 +- .../xpack/sql/type/DataType.java | 16 ++- .../xpack/sql/type/DataTypeConversion.java | 26 ++-- .../xpack/sql/type/DataTypes.java | 14 +-- .../xpack/sql/type/DateEsField.java | 2 +- .../elasticsearch/xpack/sql/type/Types.java | 4 +- .../analyzer/VerifierErrorMessagesTests.java | 7 +- .../analysis/index/IndexResolverTests.java | 2 +- .../extractor/FieldHitExtractorTests.java | 2 +- .../scalar/datetime/DayOfYearTests.java | 2 +- .../arithmetic/BinaryArithmeticTests.java | 12 +- .../xpack/sql/optimizer/OptimizerTests.java | 2 +- .../sql/parser/EscapedFunctionsTests.java | 6 +- .../logical/command/sys/SysParserTests.java | 4 +- .../logical/command/sys/SysTypesTests.java | 4 +- .../xpack/sql/planner/QueryFolderTests.java | 2 +- .../sql/planner/QueryTranslatorTests.java | 4 +- .../sql/type/DataTypeConversionTests.java | 12 +- .../xpack/sql/type/DataTypesTests.java | 22 ++-- .../xpack/sql/type/TypesTests.java | 12 +- 56 files changed, 315 insertions(+), 309 deletions(-) diff --git a/docs/reference/sql/endpoints/rest.asciidoc b/docs/reference/sql/endpoints/rest.asciidoc index 901d34113aee6..cdce98ef0e500 100644 --- a/docs/reference/sql/endpoints/rest.asciidoc +++ b/docs/reference/sql/endpoints/rest.asciidoc @@ -68,7 +68,7 @@ Which returns: {"name": "author", "type": "text"}, {"name": "name", "type": "text"}, {"name": "page_count", "type": "short"}, - {"name": "release_date", "type": "date"} + {"name": "release_date", "type": "datetime"} ], "rows": [ ["Peter F. Hamilton", "Pandora's Star", 768, "2004-03-02T00:00:00.000Z"], @@ -186,7 +186,7 @@ Douglas Adams |The Hitchhiker's Guide to the Galaxy|180 |1979-10-12T In addition to the `query` and `cursor` fields, the request can contain `fetch_size` and `time_zone`. `fetch_size` is a hint for how many results to return in each page. SQL might chose to return more -or fewer results though. `time_zone` is the time zone to use for date -functions and date parsing. `time_zone` defaults to `utc` and can take +or fewer results though. `time_zone` is the time zone to use for datetime +functions and datetime parsing. `time_zone` defaults to `utc` and can take any values documented http://www.joda.org/joda-time/apidocs/org/joda/time/DateTimeZone.html[here]. 
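As a concrete illustration of the paging flow described in the documentation above, here is a minimal, hypothetical Java sketch (not part of this patch series) that issues a SQL query through the low-level REST client and would follow the returned cursor for the next page. The `/_sql` endpoint, the `library` example index, and the `query`, `cursor`, and `fetch_size` request fields are taken from the docs above; the host, port, and class name are illustrative assumptions.

[source,java]
--------------------------------------------------
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class SqlPagingSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // First page: send the query plus a fetch_size hint; per the docs above,
            // the server may still return more or fewer rows per page than requested.
            Request first = new Request("POST", "/_sql");
            first.setJsonEntity("{\"query\": \"SELECT * FROM library ORDER BY page_count DESC\", \"fetch_size\": 5}");
            Response response = client.performRequest(first);
            System.out.println(EntityUtils.toString(response.getEntity()));

            // Subsequent pages: echo back the "cursor" string from the previous
            // response body (JSON extraction omitted here for brevity).
            // Request next = new Request("POST", "/_sql");
            // next.setJsonEntity("{\"cursor\": \"" + cursor + "\"}");
        }
    }
}
--------------------------------------------------

The final page of results should come back without a cursor, which is the signal to stop paging.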
diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc index 416e927903961..15fdba39026ef 100644 --- a/docs/reference/sql/functions/date-time.asciidoc +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -22,9 +22,9 @@ The table below shows the mapping between {es} and {es-sql}: |========================== s|{es} s|{es-sql} -2+h| Index/Table date math +2+h| Index/Table datetime math 2+| -2+h| Query date math +2+h| Query date/time math | 1y | INTERVAL 1 YEAR | 2M | INTERVAL 2 MONTH | 3w | INTERVAL 21 DAY @@ -57,7 +57,7 @@ s|Description ==== Operators -Basic arithmetic operators (`+`, `-`, etc) support date-time parameters as indicated below: +Basic arithmetic operators (`+`, `-`, etc) support date/time parameters as indicated below: ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -66,7 +66,7 @@ include-tagged::{sql-specs}/docs.csv-spec[dtIntervalPlusInterval] ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/docs.csv-spec[dtDatePlusInterval] +include-tagged::{sql-specs}/docs.csv-spec[dtDateTimePlusInterval] -------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] @@ -81,7 +81,7 @@ include-tagged::{sql-specs}/docs.csv-spec[dtIntervalMinusInterval] ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/docs.csv-spec[dtDateMinusInterval] +include-tagged::{sql-specs}/docs.csv-spec[dtDateTimeMinusInterval] -------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] @@ -146,18 +146,18 @@ include-tagged::{sql-specs}/docs.csv-spec[filterNow] .Synopsis: [source, sql] -------------------------------------------------- -DAY_OF_MONTH(date_exp<1>) +DAY_OF_MONTH(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the month from a date. +Extract the day of the month from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -170,18 +170,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfMonth] .Synopsis: [source, sql] -------------------------------------------------- -DAY_OF_WEEK(date_exp<1>) +DAY_OF_WEEK(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the week from a date. Sunday is `1`, Monday is `2`, etc. +Extract the day of the week from a date/datetime. Sunday is `1`, Monday is `2`, etc. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -194,18 +194,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfWeek] .Synopsis: [source, sql] -------------------------------------------------- -DAY_OF_YEAR(date_exp<1>) +DAY_OF_YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the year from a date. +Extract the day of the year from a date/datetime. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -218,18 +218,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayOfYear] .Synopsis: [source, sql] -------------------------------------------------- -DAY_NAME(date_exp<1>) +DAY_NAME(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: string .Description: -Extract the day of the week from a datetime in text format (`Monday`, `Tuesday`...). +Extract the day of the week from a date/datetime in text format (`Monday`, `Tuesday`...). ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -242,18 +242,18 @@ include-tagged::{sql-specs}/docs.csv-spec[dayName] .Synopsis: [source, sql] -------------------------------------------------- -HOUR_OF_DAY(date_exp<1>) +HOUR_OF_DAY(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the hour of the day from a date. +Extract the hour of the day from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -266,18 +266,18 @@ include-tagged::{sql-specs}/docs.csv-spec[hourOfDay] .Synopsis: [source, sql] -------------------------------------------------- -ISO_DAY_OF_WEEK(date_exp<1>) +ISO_DAY_OF_WEEK(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the day of the week from a date, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. +Extract the day of the week from a date/datetime, following the https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. Monday is `1`, Tuesday is `2`, etc. ["source","sql",subs="attributes,callouts,macros"] @@ -291,18 +291,18 @@ include-tagged::{sql-specs}/docs.csv-spec[isoDayOfWeek] .Synopsis: [source, sql] -------------------------------------------------- -ISO_WEEK_OF_YEAR(date_exp<1>) +ISO_WEEK_OF_YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the week of the year from a date, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week +Extract the week of the year from a date/datetime, following https://en.wikipedia.org/wiki/ISO_week_date[ISO 8601 standard]. The first week of a year is the first week with a majority (4 or more) of its days in January. ["source","sql",subs="attributes,callouts,macros"] @@ -316,18 +316,18 @@ include-tagged::{sql-specs}/docs.csv-spec[isoWeekOfYear] .Synopsis: [source, sql] -------------------------------------------------- -MINUTE_OF_DAY(date_exp<1>) +MINUTE_OF_DAY(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the minute of the day from a date. +Extract the minute of the day from a date/datetime. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -340,18 +340,18 @@ include-tagged::{sql-specs}/docs.csv-spec[minuteOfDay] .Synopsis: [source, sql] -------------------------------------------------- -MINUTE_OF_HOUR(date_exp<1>) +MINUTE_OF_HOUR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the minute of the hour from a date. +Extract the minute of the hour from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -364,18 +364,18 @@ include-tagged::{sql-specs}/docs.csv-spec[minuteOfHour] .Synopsis: [source, sql] -------------------------------------------------- -MONTH(date_exp<1>) +MONTH(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the month of the year from a date. +Extract the month of the year from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -388,18 +388,18 @@ include-tagged::{sql-specs}/docs.csv-spec[monthOfYear] .Synopsis: [source, sql] -------------------------------------------------- -MONTH_NAME(date_exp<1>) +MONTH_NAME(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: string .Description: -Extract the month from a datetime in text format (`January`, `February`...). +Extract the month from a date/datetime in text format (`January`, `February`...). ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -417,20 +417,20 @@ NOW() *Input*: _none_ -*Output*: date/time +*Output*: datetime .Description: -This function offers the same functionality as <> function: returns the date/time -when the current query reached the server. This method always returns the same value within a query. +This function offers the same functionality as <> function: returns +the datetime when the current query reached the server. This method always returns the same value within a query. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[nowFunction] -------------------------------------------------- -Typically, this function (as well as its twin <> function is used for -relative date/time filtering: +Typically, this function (as well as its twin <> function is used +for relative date/time filtering: ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -443,18 +443,18 @@ include-tagged::{sql-specs}/docs.csv-spec[filterNow] .Synopsis: [source, sql] -------------------------------------------------- -SECOND_OF_MINUTE(date_exp<1>) +SECOND_OF_MINUTE(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the second of the minute from a date. +Extract the second of the minute from a date/datetime. 
["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -467,18 +467,18 @@ include-tagged::{sql-specs}/docs.csv-spec[secondOfMinute] .Synopsis: [source, sql] -------------------------------------------------- -QUARTER(date_exp<1>) +QUARTER(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the year quarter the date falls in. +Extract the year quarter the date/datetime falls in. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -491,18 +491,18 @@ include-tagged::{sql-specs}/docs.csv-spec[quarter] .Synopsis: [source, sql] -------------------------------------------------- -WEEK_OF_YEAR(date_exp<1>) +WEEK_OF_YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the week of the year from a date. +Extract the week of the year from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -515,18 +515,18 @@ include-tagged::{sql-specs}/docs.csv-spec[weekOfYear] .Synopsis: [source, sql] -------------------------------------------------- -YEAR(date_exp<1>) +YEAR(datetime_exp<1>) -------------------------------------------------- *Input*: -<1> date expression +<1> date/datetime expression *Output*: integer .Description: -Extract the year from a date. +Extract the year from a date/datetime. ["source","sql",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -539,19 +539,19 @@ include-tagged::{sql-specs}/docs.csv-spec[year] .Synopsis: [source, sql] -------------------------------------------------- -EXTRACT(datetime_function<1> FROM date_exp<2>) +EXTRACT(datetime_function<1> FROM datetime_exp<2>) -------------------------------------------------- *Input*: -<1> datetime function name -<2> date expression +<1> date/time function name +<2> date/datetime expression *Output*: integer .Description: -Extract fields from a datetime by specifying the name of a <>. +Extract fields from a date/datetime by specifying the name of a <>. 
The following ["source","sql",subs="attributes,callouts,macros"] diff --git a/docs/reference/sql/functions/grouping.asciidoc b/docs/reference/sql/functions/grouping.asciidoc index 9784f4e9b7480..0eee0426ce65a 100644 --- a/docs/reference/sql/functions/grouping.asciidoc +++ b/docs/reference/sql/functions/grouping.asciidoc @@ -50,7 +50,7 @@ or date/time fields: ["source","sql",subs="attributes,callouts,macros"] ---- -include-tagged::{sql-specs}/docs.csv-spec[histogramDate] +include-tagged::{sql-specs}/docs.csv-spec[histogramDateTime] ---- Expressions inside the histogram are also supported as long as the @@ -74,5 +74,5 @@ Instead one can rewrite the query to move the expression on the histogram _insid ["source","sql",subs="attributes,callouts,macros"] ---- -include-tagged::{sql-specs}/docs.csv-spec[histogramDateExpression] +include-tagged::{sql-specs}/docs.csv-spec[histogramDateTimeExpression] ---- diff --git a/docs/reference/sql/functions/type-conversion.asciidoc b/docs/reference/sql/functions/type-conversion.asciidoc index dec7ccb77ae4b..b99e9cc5e9c0d 100644 --- a/docs/reference/sql/functions/type-conversion.asciidoc +++ b/docs/reference/sql/functions/type-conversion.asciidoc @@ -37,7 +37,7 @@ include-tagged::{sql-specs}/docs.csv-spec[conversionIntToStringCast] ["source","sql",subs="attributes,callouts,macros"] ---- -include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateCast] +include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateTimeCast] ---- diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java index 33a6b791dfa44..097bc476bcb09 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/EsType.java @@ -28,7 +28,7 @@ public enum EsType implements SQLType { OBJECT( Types.STRUCT), NESTED( Types.STRUCT), BINARY( Types.VARBINARY), - DATE( Types.TIMESTAMP), + DATETIME( Types.TIMESTAMP), IP( Types.VARCHAR), INTERVAL_YEAR( ExtraTypes.INTERVAL_YEAR), INTERVAL_MONTH( ExtraTypes.INTERVAL_MONTH), @@ -64,4 +64,4 @@ public String getVendor() { public Integer getVendorTypeNumber() { return type; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java index 74fb9b43ddafe..041c457d91b3d 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatement.java @@ -367,7 +367,7 @@ private void setObject(int parameterIndex, Object x, EsType dataType, String typ || x instanceof Time || x instanceof java.util.Date) { - if (dataType == EsType.DATE) { + if (dataType == EsType.DATETIME) { // converting to {@code java.util.Date} because this is the type supported by {@code XContentBuilder} for serialization java.util.Date dateToSet; if (x instanceof Timestamp) { @@ -532,4 +532,4 @@ public boolean execute(String sql, String[] columnNames) throws SQLException { public long executeLargeUpdate() throws SQLException { throw new SQLFeatureNotSupportedException("Batching not supported"); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java 
b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java index d089a99b0eeac..8c01b3112effa 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcResultSet.java @@ -245,7 +245,7 @@ private Long dateTime(int columnIndex) throws SQLException { // TODO: the B6 appendix of the jdbc spec does mention CHAR, VARCHAR, LONGVARCHAR, DATE, TIMESTAMP as supported // jdbc types that should be handled by getDate and getTime methods. From all of those we support VARCHAR and // TIMESTAMP. Should we consider the VARCHAR conversion as a later enhancement? - if (EsType.DATE == type) { + if (EsType.DATETIME == type) { // the cursor can return an Integer if the date-since-epoch is small enough, XContentParser (Jackson) will // return the "smallest" data type for numbers when parsing // TODO: this should probably be handled server side diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java index 80f00ea3bbe29..9274e9061d453 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeConverter.java @@ -213,7 +213,7 @@ static Object convert(Object v, EsType columnType, String typeString) throws SQL return doubleValue(v); // Double might be represented as string for infinity and NaN values case FLOAT: return floatValue(v); // Float might be represented as string for infinity and NaN values - case DATE: + case DATETIME: return JdbcDateUtils.asDateTimeField(v, JdbcDateUtils::asTimestamp, Timestamp::new); case INTERVAL_YEAR: case INTERVAL_MONTH: @@ -467,21 +467,21 @@ private static Double asDouble(Object val, EsType columnType, String typeString) } private static Date asDate(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATE) { + if (columnType == EsType.DATETIME) { return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asDate, Date::new); } return failConversion(val, columnType, typeString, Date.class); } private static Time asTime(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATE) { + if (columnType == EsType.DATETIME) { return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTime, Time::new); } return failConversion(val, columnType, typeString, Time.class); } private static Timestamp asTimestamp(Object val, EsType columnType, String typeString) throws SQLException { - if (columnType == EsType.DATE) { + if (columnType == EsType.DATETIME) { return JdbcDateUtils.asDateTimeField(val, JdbcDateUtils::asTimestamp, Timestamp::new); } return failConversion(val, columnType, typeString, Timestamp.class); @@ -538,4 +538,4 @@ private static long safeToLong(double x) throws SQLException { } return Math.round(x); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java index 2a5f27f11bd64..ab8465dab90c3 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/TypeUtils.java @@ -37,7 +37,7 @@ private TypeUtils() {} private static 
final Set SIGNED_TYPE = EnumSet.of(EsType.BYTE, EsType.SHORT, EsType.INTEGER, EsType.LONG, - EsType.FLOAT, EsType.HALF_FLOAT, EsType.SCALED_FLOAT, EsType.DOUBLE, EsType.DATE); + EsType.FLOAT, EsType.HALF_FLOAT, EsType.SCALED_FLOAT, EsType.DOUBLE, EsType.DATETIME); static { @@ -52,16 +52,16 @@ private TypeUtils() {} aMap.put(String.class, EsType.KEYWORD); aMap.put(byte[].class, EsType.BINARY); aMap.put(String.class, EsType.KEYWORD); - aMap.put(Timestamp.class, EsType.DATE); + aMap.put(Timestamp.class, EsType.DATETIME); // apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP} // according to B-4 table from the jdbc4.2 spec - aMap.put(Calendar.class, EsType.DATE); - aMap.put(GregorianCalendar.class, EsType.DATE); - aMap.put(java.util.Date.class, EsType.DATE); - aMap.put(java.sql.Date.class, EsType.DATE); - aMap.put(java.sql.Time.class, EsType.DATE); - aMap.put(LocalDateTime.class, EsType.DATE); + aMap.put(Calendar.class, EsType.DATETIME); + aMap.put(GregorianCalendar.class, EsType.DATETIME); + aMap.put(java.util.Date.class, EsType.DATETIME); + aMap.put(java.sql.Date.class, EsType.DATETIME); + aMap.put(java.sql.Time.class, EsType.DATETIME); + aMap.put(LocalDateTime.class, EsType.DATETIME); CLASS_TO_TYPE = Collections.unmodifiableMap(aMap); Map> types = new LinkedHashMap<>(); @@ -77,7 +77,7 @@ private TypeUtils() {} types.put(EsType.KEYWORD, String.class); types.put(EsType.TEXT, String.class); types.put(EsType.BINARY, byte[].class); - types.put(EsType.DATE, Timestamp.class); + types.put(EsType.DATETIME, Timestamp.class); types.put(EsType.IP, String.class); types.put(EsType.INTERVAL_YEAR, Period.class); types.put(EsType.INTERVAL_MONTH, Period.class); @@ -172,4 +172,4 @@ static EsType of(Class clazz) throws SQLException { } return dataType; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java index 50143f729370f..9134378a370c8 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/JdbcPreparedStatementTests.java @@ -29,7 +29,7 @@ import static org.elasticsearch.xpack.sql.jdbc.EsType.BINARY; import static org.elasticsearch.xpack.sql.jdbc.EsType.BOOLEAN; import static org.elasticsearch.xpack.sql.jdbc.EsType.BYTE; -import static org.elasticsearch.xpack.sql.jdbc.EsType.DATE; +import static org.elasticsearch.xpack.sql.jdbc.EsType.DATETIME; import static org.elasticsearch.xpack.sql.jdbc.EsType.DOUBLE; import static org.elasticsearch.xpack.sql.jdbc.EsType.FLOAT; import static org.elasticsearch.xpack.sql.jdbc.EsType.HALF_FLOAT; @@ -371,13 +371,13 @@ public void testSettingTimestampValues() throws SQLException { Timestamp someTimestamp = new Timestamp(randomLong()); jps.setTimestamp(1, someTimestamp); assertEquals(someTimestamp.getTime(), ((Date)value(jps)).getTime()); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); Calendar nonDefaultCal = randomCalendar(); // February 29th, 2016. 
01:17:55 GMT = 1456708675000 millis since epoch jps.setTimestamp(1, new Timestamp(1456708675000L), nonDefaultCal); assertEquals(1456708675000L, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); long beforeEpochTime = randomLongBetween(Long.MIN_VALUE, 0); jps.setTimestamp(1, new Timestamp(beforeEpochTime), nonDefaultCal); @@ -404,7 +404,7 @@ public void testSettingTimeValues() throws SQLException { Calendar nonDefaultCal = randomCalendar(); jps.setTime(1, time, nonDefaultCal); assertEquals(4675000, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, time, Types.VARCHAR); @@ -426,13 +426,13 @@ public void testSettingSqlDateValues() throws SQLException { java.sql.Date someSqlDate = new java.sql.Date(randomLong()); jps.setDate(1, someSqlDate); assertEquals(someSqlDate.getTime(), ((Date)value(jps)).getTime()); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); someSqlDate = new java.sql.Date(randomLong()); Calendar nonDefaultCal = randomCalendar(); jps.setDate(1, someSqlDate, nonDefaultCal); assertEquals(someSqlDate.getTime(), convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, someSqlDate, Types.VARCHAR); @@ -456,7 +456,7 @@ public void testSettingCalendarValues() throws SQLException { jps.setObject(1, someCalendar); assertEquals(someCalendar.getTime(), value(jps)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, someCalendar, Types.VARCHAR); @@ -466,7 +466,7 @@ public void testSettingCalendarValues() throws SQLException { Calendar nonDefaultCal = randomCalendar(); jps.setObject(1, nonDefaultCal); assertEquals(nonDefaultCal.getTime(), value(jps)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); } public void testThrownExceptionsWhenSettingCalendarValues() throws SQLException { @@ -483,7 +483,7 @@ public void testSettingDateValues() throws SQLException { jps.setObject(1, someDate); assertEquals(someDate, value(jps)); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, someDate, Types.VARCHAR); @@ -505,7 +505,7 @@ public void testSettingLocalDateTimeValues() throws SQLException { jps.setObject(1, ldt); assertEquals(Date.class, value(jps).getClass()); - assertEquals(DATE, jdbcType(jps)); + assertEquals(DATETIME, jdbcType(jps)); assertTrue(value(jps) instanceof java.util.Date); jps.setObject(1, ldt, Types.VARCHAR); diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java index 39b9393dacbe8..2e33f4e130741 100644 --- a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/TypeConverterTests.java @@ -41,8 +41,8 @@ public void testDoubleAsNative() throws Exception { public void testTimestampAsNative() throws Exception { DateTime now = DateTime.now(); - assertThat(convertAsNative(now, EsType.DATE), 
instanceOf(Timestamp.class)); - assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, EsType.DATE)).getTime()); + assertThat(convertAsNative(now, EsType.DATETIME), instanceOf(Timestamp.class)); + assertEquals(now.getMillis(), ((Timestamp) convertAsNative(now, EsType.DATETIME)).getTime()); } private Object convertAsNative(Object value, EsType type) throws Exception { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java index 51de82f97413b..868c9584a0057 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/SqlProtocolTestCase.java @@ -64,11 +64,12 @@ public void testTextualType() throws IOException { } public void testDateTimes() throws IOException { - assertQuery("SELECT CAST('2019-01-14T12:29:25.000Z' AS DATE)", "CAST('2019-01-14T12:29:25.000Z' AS DATE)", "date", - "2019-01-14T12:29:25.000Z", 24); - assertQuery("SELECT CAST(-26853765751000 AS DATE)", "CAST(-26853765751000 AS DATE)", "date", "1119-01-15T12:37:29.000Z", 24); - assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATE)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATE)", "date", - "1119-01-15T12:37:29.000Z", 24); + assertQuery("SELECT CAST('2019-01-14T12:29:25.000Z' AS DATETIME)", "CAST('2019-01-14T12:29:25.000Z' AS DATETIME)", + "datetime", "2019-01-14T12:29:25.000Z", 24); + assertQuery("SELECT CAST(-26853765751000 AS DATETIME)", "CAST(-26853765751000 AS DATETIME)", + "datetime", "1119-01-15T12:37:29.000Z", 24); + assertQuery("SELECT CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", "CAST(CAST('-26853765751000' AS BIGINT) AS DATETIME)", + "datetime", "1119-01-15T12:37:29.000Z", 24); } public void testIPs() throws IOException { diff --git a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java index 3d5308d148d57..2550026c153a5 100644 --- a/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java +++ b/x-pack/plugin/sql/qa/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/ResultSetTestCase.java @@ -201,10 +201,10 @@ public void testGettingInvalidByte() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getByte("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Byte]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)), sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Byte.class)); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Byte]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Byte]", of(randomDate)), sqle.getMessage()); }); } @@ -324,10 +324,10 @@ public void testGettingInvalidShort() throws Exception { sqle.getMessage()); sqle = expectThrows(SQLException.class, () -> results.getShort("test_date")); - assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Short]", of(randomDate)), + assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)), sqle.getMessage()); 
            sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Short.class));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Short]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Short]", of(randomDate)),
                 sqle.getMessage());
         });
     }
@@ -439,10 +439,10 @@ public void testGettingInvalidInteger() throws Exception {
                 sqle.getMessage());
 
             sqle = expectThrows(SQLException.class, () -> results.getInt("test_date"));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Integer]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)),
                 sqle.getMessage());
             sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Integer.class));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Integer]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Integer]", of(randomDate)),
                 sqle.getMessage());
         });
     }
@@ -541,10 +541,10 @@ public void testGettingInvalidLong() throws Exception {
                 sqle.getMessage());
 
             sqle = expectThrows(SQLException.class, () -> results.getLong("test_date"));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Long]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)),
                 sqle.getMessage());
             sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Long.class));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Long]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Long]", of(randomDate)),
                 sqle.getMessage());
         });
     }
@@ -624,10 +624,10 @@ public void testGettingInvalidDouble() throws Exception {
                 sqle.getMessage());
 
             sqle = expectThrows(SQLException.class, () -> results.getDouble("test_date"));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Double]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)),
                 sqle.getMessage());
             sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Double.class));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Double]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Double]", of(randomDate)),
                 sqle.getMessage());
         });
     }
@@ -707,10 +707,10 @@ public void testGettingInvalidFloat() throws Exception {
                 sqle.getMessage());
 
             sqle = expectThrows(SQLException.class, () -> results.getFloat("test_date"));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Float]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)),
                 sqle.getMessage());
             sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Float.class));
-            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Float]", of(randomDate)),
+            assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Float]", of(randomDate)),
                 sqle.getMessage());
         });
     }
@@ -768,7 +768,7 @@ public void testGettingBooleanValues() throws Exception {
             assertEquals("Expected: <true> but was: <false> for field " + fld, true, results.getObject(fld, Boolean.class));
         }
         SQLException sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
-        assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate1)),
+        assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate1)),
             sqle.getMessage());
 
         results.next();
@@ -778,11 +778,11 @@ public void testGettingBooleanValues() throws Exception {
             assertEquals("Expected: <false> but was: <true> for field " + fld, false, results.getObject(fld, Boolean.class));
         }
         sqle = expectThrows(SQLException.class, () -> results.getBoolean("test_date"));
-        assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate2)),
+        assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)),
            sqle.getMessage());
         sqle = expectThrows(SQLException.class, () -> results.getObject("test_date", Boolean.class));
-        assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATE] to [Boolean]", of(randomDate2)),
+        assertEquals(format(Locale.ROOT, "Unable to convert value [%.128s] of type [DATETIME] to [Boolean]", of(randomDate2)),
            sqle.getMessage());
 
         results.next();
diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec
index 668316372c4bb..7051353d78dda 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec
@@ -223,7 +223,7 @@ SELECT HISTOGRAM(salary, 5000) AS h FROM test_emp GROUP BY h;
 70000
 ;
 
-histogramDate
+histogramDateTime
 schema::h:ts|c:l
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h;
 
@@ -247,7 +247,7 @@ null |10
 ;
 
-histogramDateWithCountAndOrder
+histogramDateTimeWithCountAndOrder
 schema::h:ts|c:l
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
 
@@ -270,7 +270,7 @@ SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) as c FROM test_emp
 null |10
 ;
 
-histogramDateWithMonthOnTop
+histogramDateTimeWithMonthOnTop
 schema::h:i|c:l
 SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
 
@@ -286,7 +286,7 @@ SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP B
 null |10
 ;
 
-histogramDateWithYearOnTop
+histogramDateTimeWithYearOnTop
 schema::h:i|c:l
 SELECT HISTOGRAM(YEAR(birth_date), 2) AS h, COUNT(*) as c FROM test_emp GROUP BY h ORDER BY h DESC;
 h | c
diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec
index 21dd7bf530e3d..7406ea488308d 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec
@@ -18,13 +18,13 @@ SELECT gender g FROM "test_emp" WHERE emp_no < 10020 GROUP BY g ORDER BY gender;
 groupByOnTextOnAliasOrderDesc
 SELECT gender g FROM "test_emp" WHERE emp_no < 10020 GROUP BY g ORDER BY g DESC;
 
-groupByOnDate
+groupByOnDateTime
 SELECT birth_date b FROM "test_emp" GROUP BY birth_date ORDER BY birth_date DESC;
-groupByOnDateWithWhereClause
+groupByOnDateTimeWithWhereClause
 SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date ORDER BY birth_date DESC;
-groupByOnDateWithWhereAndLimit
+groupByOnDateTimeWithWhereAndLimit
 SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date ORDER BY birth_date DESC LIMIT 1;
-groupByOnDateOnAlias
+groupByOnDateTimeOnAlias
 SELECT birth_date b FROM "test_emp" WHERE emp_no < 10020 GROUP BY b ORDER BY birth_date DESC;
 
 groupByOnNumber
@@ -62,13 +62,13 @@ SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l
 groupByMultiOnTextOnAliasOrderDesc
 SELECT gender g, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY g, l ORDER BY g, l ASC;
 
-groupByMultiOnDate
+groupByMultiOnDateTime
 SELECT birth_date b, languages l FROM "test_emp" GROUP BY birth_date, languages ORDER BY birth_date DESC, languages;
-groupByMultiOnDateWithWhereClause
+groupByMultiOnDateTimeWithWhereClause
 SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages;
-groupByMultiOnDateWithWhereAndLimit
+groupByMultiOnDateTimeWithWhereAndLimit
 SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY birth_date, languages ORDER BY birth_date DESC, languages LIMIT 1;
-groupByMultiOnDateOnAlias
+groupByMultiOnDateTimeOnAlias
 SELECT birth_date b, languages l FROM "test_emp" WHERE emp_no < 10020 GROUP BY b, l ORDER BY birth_date DESC, languages;
 
 groupByMultiAddScalar
@@ -248,7 +248,7 @@ aggMinWithCastAndFilter
 SELECT gender g, CAST(MIN(emp_no) AS SMALLINT) m, COUNT(1) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender ORDER BY gender;
 aggMinWithAlias
 SELECT gender g, MIN(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender;
-aggMinOnDate
+aggMinOnDateTime
 SELECT gender, MIN(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender;
 
 // Conditional MIN
@@ -304,7 +304,7 @@ aggMaxAndCountWithFilterAndLimit
 SELECT gender g, MAX(emp_no) m, COUNT(1) c FROM "test_emp" WHERE emp_no > 10000 GROUP BY gender ORDER BY gender LIMIT 1;
 aggMaxWithAlias
 SELECT gender g, MAX(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender;
-aggMaxOnDate
+aggMaxOnDateTime
 SELECT gender, MAX(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender;
 aggAvgAndMaxWithLikeFilter
 SELECT CAST(AVG(salary) AS LONG) AS avg, CAST(SUM(salary) AS LONG) AS s FROM "test_emp" WHERE first_name LIKE 'G%';
@@ -482,9 +482,9 @@ selectCountWhereIsNull
 SELECT COUNT(*) count FROM test_emp WHERE first_name IS NULL;
 selectLanguagesCountWithNullsAndGroupByLanguage
 SELECT languages l, COUNT(*) c FROM test_emp GROUP BY languages ORDER BY languages;
-selectHireDateGroupByHireDate
+selectHireDateTimeGroupByHireDateTime
 SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC;
-selectHireDateGroupByHireDate
+selectHireDateTimeGroupByHireDateTime
 SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC;
 selectSalaryGroupBySalary
 SELECT salary, COUNT(*) c FROM test_emp GROUP BY salary ORDER BY salary DESC;
diff --git a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec
index 7ad8ef342669d..4134db187c9a6 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/alias.csv-spec
@@ -28,13 +28,13 @@ DESCRIBE test_alias;
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 extra |STRUCT |object
 extra.info |STRUCT |object
@@ -44,7 +44,7 @@ extra_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
@@ -56,13 +56,13 @@ DESCRIBE "test_*";
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 extra |STRUCT |object
 extra.info |STRUCT |object
@@ -72,7 +72,7 @@ extra_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
diff --git a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec
index 0128873997bd3..e23ee39c54610 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/command.csv-spec
@@ -228,13 +228,13 @@ DESCRIBE LIKE 'test_emp';
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 extra |STRUCT |object
 extra.info |STRUCT |object
@@ -244,7 +244,7 @@ extra_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
@@ -256,13 +256,13 @@ DESCRIBE LIKE 'test_emp%';
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 extra |STRUCT |object
 extra.info |STRUCT |object
@@ -272,7 +272,7 @@ extra_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
@@ -284,18 +284,18 @@ DESCRIBE "test_emp";
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
@@ -310,18 +310,18 @@ DESCRIBE "test_*,-test_alias*";
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec
index 9434ead51da9b..8d9a65d1b85b6 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/datetime-interval.csv-spec
@@ -141,7 +141,7 @@ INTERVAL 1 DAY + INTERVAL 53 MINUTES
 ;
 
 datePlusIntervalInline
-SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result;
+SELECT CAST('1969-05-13T12:34:56' AS DATETIME) + INTERVAL 49 YEARS AS result;
 
 result
 --------------------
@@ -183,7 +183,7 @@ SELECT -2 * INTERVAL '1 23:45' DAY TO MINUTES AS result;
 ;
 
 dateMinusInterval
-SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result;
+SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result;
 
 result
 --------------------
@@ -288,4 +288,4 @@ SELECT birth_date, MAX(hire_date) - INTERVAL 1 YEAR AS f FROM test_emp GROUP BY
 1952-05-15T00:00:00Z|1953
 1952-06-13T00:00:00Z|1953
 1952-07-08T00:00:00Z|1953
-;
\ No newline at end of file
+;
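Note on the interval specs above: the datetime plus/minus INTERVAL behavior they pin down reduces to plain java.time arithmetic (the BinaryArithmeticTests changes further below exercise exactly these operations). A minimal standalone sketch of the semantics — illustrative only, not part of the patch; the class and variable names are ours:

import java.time.Duration;
import java.time.Period;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class IntervalArithmeticSketch {
    public static void main(String[] args) {
        ZonedDateTime dt = ZonedDateTime.of(1969, 5, 13, 12, 34, 56, 0, ZoneOffset.UTC);
        // Year-month intervals (e.g. INTERVAL 49 YEARS) map to java.time.Period...
        System.out.println(dt.plus(Period.ofYears(49)));   // 2018-05-13T12:34:56Z
        // ...while day-time intervals map to java.time.Duration.
        System.out.println(dt.minus(Duration.ofHours(2))); // 1969-05-13T10:34:56Z
    }
}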
diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec
index 5e51ae69bf396..39681e7118fc1 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.csv-spec
@@ -54,7 +54,7 @@ d:i | l:s
 ;
 
 //
-// Date
+// DateTime
 //
 dateTimeIsoDayOfWeek
 SELECT ISO_DAY_OF_WEEK(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY ISO_DAY_OF_WEEK(birth_date);
@@ -380,4 +380,4 @@ Berni
 Bezalel
 Bojan
-;
\ No newline at end of file
+;
diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec
index 4b12d2de58fc7..3748a116b7450 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec
@@ -10,7 +10,7 @@
 // This has implications on the results, which could change given specific locales where the rules for determining the start of a year are different.
 //
-// Date
+// DateTime
 //
 
 dateTimeDay
@@ -25,10 +25,10 @@ SELECT MONTH(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORD
 dateTimeYear
 SELECT YEAR(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
 
-monthNameFromStringDate
+monthNameFromStringDateTime
 SELECT MONTHNAME(CAST('2018-09-03' AS TIMESTAMP)) month FROM "test_emp" limit 1;
 
-dayNameFromStringDate
+dayNameFromStringDateTime
 SELECT DAYNAME(CAST('2018-09-03' AS TIMESTAMP)) day FROM "test_emp" limit 1;
 
 quarterSelect
diff --git a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec
index 2ffbdc302af47..5c4f016d16459 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/docs.csv-spec
@@ -14,18 +14,18 @@ DESCRIBE emp;
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
@@ -53,18 +53,18 @@ SHOW COLUMNS IN emp;
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
@@ -746,9 +746,9 @@ SELECT HISTOGRAM(salary % 100, 10) AS h, COUNT(*) AS c FROM emp GROUP BY h;
 // end::histogramNumericExpression
 ;
 
-histogramDate
+histogramDateTime
 schema::h:ts|c:l
-// tag::histogramDate
+// tag::histogramDateTime
 SELECT HISTOGRAM(birth_date, INTERVAL 1 YEAR) AS h, COUNT(*) AS c FROM emp GROUP BY h;
 
@@ -770,7 +770,7 @@ null |10
 1963-02-07T00:00:00Z|7
 1964-02-02T00:00:00Z|5
 
-// end::histogramDate
+// end::histogramDateTime
 ;
 
 expressionOnHistogramNotAllowed-Ignore
 // tag::expressionOnHistogramNotAllowed
 SELECT MONTH(HISTOGRAM(birth_date), 2)) AS h, COUNT(*) as c FROM emp GROUP BY h ORDER BY h DESC;
 // end::expressionOnHistogramNotAllowed
 
-histogramDateExpression
+histogramDateTimeExpression
 schema::h:i|c:l
-// tag::histogramDateExpression
+// tag::histogramDateTimeExpression
 SELECT HISTOGRAM(MONTH(birth_date), 2) AS h, COUNT(*) as c FROM emp GROUP BY h ORDER BY h DESC;
 
 h | c
 0 |6
 null |10
 
-// end::histogramDateExpression
+// end::histogramDateTimeExpression
 ;
 
 ///////////////////////////////
 //
-// Date/Time
+// DateTime/Time
 //
 ///////////////////////////////
 
@@ -816,14 +816,14 @@ SELECT INTERVAL 1 DAY + INTERVAL 53 MINUTES AS result;
 ;
 
-dtDatePlusInterval
-// tag::dtDatePlusInterval
-SELECT CAST('1969-05-13T12:34:56' AS DATE) + INTERVAL 49 YEARS AS result;
+dtDateTimePlusInterval
+// tag::dtDateTimePlusInterval
+SELECT CAST('1969-05-13T12:34:56' AS DATETIME) + INTERVAL 49 YEARS AS result;
 
 result
 --------------------
 2018-05-13T12:34:56Z
-// end::dtDatePlusInterval
+// end::dtDateTimePlusInterval
 ;
 
 dtMinusInterval
@@ -848,14 +848,14 @@ SELECT INTERVAL '1' DAY - INTERVAL '2' HOURS AS result;
 ;
 
-dtDateMinusInterval
-// tag::dtDateMinusInterval
-SELECT CAST('2018-05-13T12:34:56' AS DATE) - INTERVAL '2-8' YEAR TO MONTH AS result;
+dtDateTimeMinusInterval
+// tag::dtDateTimeMinusInterval
+SELECT CAST('2018-05-13T12:34:56' AS DATETIME) - INTERVAL '2-8' YEAR TO MONTH AS result;
 
 result
 --------------------
 2015-09-13T12:34:56Z
-// end::dtDateMinusInterval
+// end::dtDateTimeMinusInterval
 ;
 
 dtIntervalMul
@@ -1360,14 +1360,14 @@ SELECT CAST(123 AS VARCHAR) AS string;
 // end::conversionIntToStringCast
 ;
 
-conversionStringToDateCast
-// tag::conversionStringToDateCast
+conversionStringToDateTimeCast
+// tag::conversionStringToDateTimeCast
 SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year;
 
 year
 ---------------
 2018
-// end::conversionStringToDateCast
+// end::conversionStringToDateTimeCast
 ;
 
 ///////////////////////////////
@@ -1918,7 +1918,7 @@ elastic
 ///////////////////////////////
 //
-// Date-Time functions
+// DateTime-Time functions
 //
 ///////////////////////////////
 
diff --git a/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec
index d20769e237f97..39f9b2965c6dc 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/nested.csv-spec
@@ -8,18 +8,18 @@ DESCRIBE test_emp;
 column | type | mapping
 --------------------+---------------+---------------
-birth_date |TIMESTAMP |date
+birth_date |TIMESTAMP |datetime
 dep |STRUCT |nested
 dep.dep_id |VARCHAR |keyword
 dep.dep_name |VARCHAR |text
 dep.dep_name.keyword|VARCHAR |keyword
-dep.from_date |TIMESTAMP |date
-dep.to_date |TIMESTAMP |date
+dep.from_date |TIMESTAMP |datetime
+dep.to_date |TIMESTAMP |datetime
 emp_no |INTEGER |integer
 first_name |VARCHAR |text
 first_name.keyword |VARCHAR |keyword
 gender |VARCHAR |keyword
-hire_date |TIMESTAMP |date
+hire_date |TIMESTAMP |datetime
 languages |TINYINT |byte
 last_name |VARCHAR |text
 last_name.keyword |VARCHAR |keyword
diff --git a/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec
index 474fceaed4612..19541cf5d9f32 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/null.csv-spec
@@ -3,7 +3,7 @@
 //
 
 dateTimeOverNull
-SELECT YEAR(CAST(NULL AS DATE)) d;
+SELECT YEAR(CAST(NULL AS DATETIME)) d;
 
 d:i
 null
diff --git a/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql b/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql
index 69c572f4ddd4e..f61d48af4ff37 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql
+++ b/x-pack/plugin/sql/qa/src/main/resources/setup_mock_metadata_get_columns.sql
@@ -33,7 +33,7 @@ SELECT null, 'test1', 'name.keyword', 12, 'KEYWORD', 0, 2147483647, null, null,
   null, null, 12, 0, 2147483647, 1, 'YES', null, null, null, null, 'NO', 'NO'
 FROM DUAL
 UNION ALL
-SELECT null, 'test2', 'date', 93, 'DATE', 24, 8, null, null,
+SELECT null, 'test2', 'date', 93, 'DATETIME', 24, 8, null, null,
   1, -- columnNullable
   null, null, 9, 3, null, 1, 'YES', null, null, null, null, 'NO', 'NO'
 FROM DUAL
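A note on the client-visible effect of the spec and mock-metadata changes above: the JDBC/ODBC data type code stays 93 (java.sql.Types.TIMESTAMP); only the type name string becomes DATETIME, and TIMESTAMP remains accepted as a cast target. A hedged JDBC sketch — the URL and class name are placeholders of ours, assuming the Elasticsearch SQL JDBC driver is on the classpath:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Timestamp;

public class DateTimeCastSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection string for the es-sql JDBC driver.
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
             Statement st = con.createStatement();
             // DATETIME is the new cast target; TIMESTAMP still works as a synonym.
             ResultSet rs = st.executeQuery("SELECT CAST('2018-05-19T11:23:45Z' AS DATETIME) AS dt")) {
            while (rs.next()) {
                Timestamp ts = rs.getTimestamp("dt"); // still surfaces as a JDBC TIMESTAMP (code 93)
                System.out.println(ts);
            }
        }
    }
}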
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java
index 618dd66d88d11..43d356720f8ed 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java
@@ -384,7 +384,7 @@ private static EsField createField(String fieldName, String typeName, Map exps, Predicate exps) {
-        return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new));
+        return Nullability.and(exps.stream().map(Expression::nullable).toArray(Nullability[]::new));
 
     public static boolean foldable(List<Expression> exps) {
@@ -171,25 +171,25 @@ public static TypeResolution typeMustBeString(Expression e, String operationName
     }
 
     public static TypeResolution typeMustBeDate(Expression e, String operationName, ParamOrdinal paramOrd) {
-        return typeMustBe(e, dt -> dt == DataType.DATE, operationName, paramOrd, "date");
+        return typeMustBe(e, dt -> dt == DataType.DATETIME, operationName, paramOrd, "date");
     }
 
     public static TypeResolution typeMustBeNumericOrDate(Expression e, String operationName, ParamOrdinal paramOrd) {
-        return typeMustBe(e, dt -> dt.isNumeric() || dt == DataType.DATE, operationName, paramOrd, "numeric", "date");
+        return typeMustBe(e, dt -> dt.isNumeric() || dt == DataType.DATETIME, operationName, paramOrd, "numeric", "date");
     }
 
     public static TypeResolution typeMustBe(Expression e,
-                                        Predicate<DataType> predicate,
-                                        String operationName,
-                                        ParamOrdinal paramOrd,
-                                        String... acceptedTypes) {
+                                            Predicate<DataType> predicate,
+                                            String operationName,
+                                            ParamOrdinal paramOrd,
+                                            String... acceptedTypes) {
         return predicate.test(e.dataType()) || DataTypes.isNull(e.dataType())?
             TypeResolution.TYPE_RESOLVED :
             new TypeResolution(format(Locale.ROOT, "[%s]%s argument must be [%s], found value [%s] type [%s]",
-                operationName,
-                paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT),
-                Strings.arrayToDelimitedString(acceptedTypes, " or "),
-                Expressions.name(e),
-                e.dataType().esType));
+                    operationName,
+                    paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT),
+                    Strings.arrayToDelimitedString(acceptedTypes, " or "),
+                    Expressions.name(e),
+                    e.dataType().esType));
     }
 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java
index 1cace59a2cc00..46614755b7e8f 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java
@@ -42,7 +42,7 @@ protected TypeResolution resolveType() {
         TypeResolution resolution = Expressions.typeMustBeNumericOrDate(field(), "HISTOGRAM", ParamOrdinal.FIRST);
         if (resolution == TypeResolution.TYPE_RESOLVED) {
             // interval must be Literal interval
-            if (field().dataType() == DataType.DATE) {
+            if (field().dataType() == DataType.DATETIME) {
                 resolution = Expressions.typeMustBe(interval, DataTypes::isInterval, "(Date) HISTOGRAM", ParamOrdinal.SECOND, "interval");
             } else {
                 resolution = Expressions.typeMustBeNumeric(interval, "(Numeric) HISTOGRAM", ParamOrdinal.SECOND);
@@ -81,4 +81,4 @@ public boolean equals(Object obj) {
         }
         return false;
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java
index 50a7f8868141a..82556795b5961 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/CurrentDateTime.java
@@ -21,7 +21,7 @@ public class CurrentDateTime extends ConfigurationFunction {
     private final ZonedDateTime dateTime;
 
     public CurrentDateTime(Source source, Expression precision, Configuration configuration) {
-        super(source, configuration, DataType.DATE);
+        super(source, configuration, DataType.DATETIME);
         this.precision = precision;
         int p = precision != null ? ((Number) precision.fold()).intValue() : 0;
         this.dateTime = nanoPrecision(configuration().now(), p);
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java
index 074518f6b7d7c..cd13570a1ad10 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/gen/script/ScriptWeaver.java
@@ -79,7 +79,7 @@ default ScriptTemplate scriptWithScalar(ScalarFunctionAttribute scalar) {
 
     default ScriptTemplate scriptWithAggregate(AggregateFunctionAttribute aggregate) {
         String template = "{}";
-        if (aggregate.dataType() == DataType.DATE) {
+        if (aggregate.dataType() == DataType.DATETIME) {
             template = "{sql}.asDateTime({})";
         }
         return new ScriptTemplate(processScript(template),
@@ -89,7 +89,7 @@ default ScriptTemplate scriptWithAggregate(AggregateFunctionAttribute aggregate)
 
     default ScriptTemplate scriptWithGrouping(GroupingFunctionAttribute grouping) {
         String template = "{}";
-        if (grouping.dataType() == DataType.DATE) {
+        if (grouping.dataType() == DataType.DATETIME) {
             template = "{sql}.asDateTime({})";
         }
         return new ScriptTemplate(processScript(template),
@@ -110,4 +110,4 @@ default String processScript(String script) {
     default String formatTemplate(String template) {
         return Scripts.formatTemplate(template);
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
index 132c390337628..68baa84a802f6 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
@@ -411,9 +411,9 @@ public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) {
             case "float":
             case "double":
                 return DataType.DOUBLE;
-            case "date":
+            case "datetime":
             case "timestamp":
-                return DataType.DATE;
+                return DataType.DATETIME;
             case "char":
             case "varchar":
             case "string":
@@ -793,7 +793,7 @@ public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) {
         } catch(IllegalArgumentException ex) {
             throw new ParsingException(source, "Invalid date received; {}", ex.getMessage());
         }
-        return new Literal(source, DateUtils.of(dt), DataType.DATE);
+        return new Literal(source, DateUtils.of(dt), DataType.DATETIME);
     }
 
     @Override
@@ -829,7 +829,7 @@ public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx)
         } catch (IllegalArgumentException ex) {
             throw new ParsingException(source, "Invalid timestamp received; {}", ex.getMessage());
         }
-        return new Literal(source, DateUtils.of(dt), DataType.DATE);
+        return new Literal(source, DateUtils.of(dt), DataType.DATETIME);
     }
 
     @Override
@@ -930,4 +930,4 @@ private static Source minusAwareSource(SqlBaseParser.NumberContext ctx) {
         }
         return null;
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
index 46380a9de2afd..5189a0ca4981e 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java
@@ -284,7 +284,7 @@ protected PhysicalPlan rule(AggregateExec a) {
                     if (matchingGroup != null) {
                         if (exp instanceof Attribute || exp instanceof ScalarFunction || exp instanceof GroupingFunction) {
                             Processor action = null;
-                            ZoneId zi = DataType.DATE == exp.dataType() ? DateUtils.UTC : null;
+                            ZoneId zi = DataType.DATETIME == exp.dataType() ? DateUtils.UTC : null;
                             /*
                              * special handling of dates since aggs return the typed Date object which needs
                              * extraction instead of handling this in the scroller, the folder handles this
@@ -335,7 +335,7 @@ protected PhysicalPlan rule(AggregateExec a) {
                             // check if the field is a date - if so mark it as such to interpret the long as a date
                             // UTC is used since that's what the server uses and there's no conversion applied
                             // (like for date histograms)
-                            ZoneId zi = DataType.DATE == child.dataType() ? DateUtils.UTC : null;
+                            ZoneId zi = DataType.DATETIME == child.dataType() ? DateUtils.UTC : null;
                             queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi));
                         }
                         // handle histogram
@@ -359,7 +359,7 @@ else if (child instanceof GroupingFunction) {
                         matchingGroup = groupingContext.groupFor(ne);
                         Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne));
 
-                        ZoneId zi = DataType.DATE == ne.dataType() ? DateUtils.UTC : null;
+                        ZoneId zi = DataType.DATETIME == ne.dataType() ? DateUtils.UTC : null;
                         queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi));
                     }
                 }
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
index e0472f27131f0..489e1506edf1a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
@@ -275,7 +275,7 @@ else if (exp instanceof GroupingFunction) {
             Expression field = h.field();
 
             // date histogram
-            if (h.dataType() == DataType.DATE) {
+            if (h.dataType() == DataType.DATETIME) {
                 long intervalAsMillis = Intervals.inMillis(h.interval());
                 // TODO: set timezone
                 if (field instanceof FieldAttribute) {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java
index 71c0e4f3e847b..ada855ec1511d 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByDateHistogram.java
@@ -15,7 +15,7 @@ import java.util.Objects;
 
 /**
- * GROUP BY key based on histograms on date fields.
+ * GROUP BY key based on histograms on date/datetime fields.
  */
 public class GroupByDateHistogram extends GroupByKey {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java
index 7d74c1c3330e2..8626ea18e30c5 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java
@@ -39,7 +39,7 @@ public final CompositeValuesSourceBuilder<?> asValueSource() {
             builder.valueType(ValueType.DOUBLE);
         } else if (script.outputType().isString()) {
             builder.valueType(ValueType.STRING);
-        } else if (script.outputType() == DataType.DATE) {
+        } else if (script.outputType() == DataType.DATETIME) {
             builder.valueType(ValueType.DATE);
         } else if (script.outputType() == DataType.BOOLEAN) {
             builder.valueType(ValueType.BOOLEAN);
@@ -78,4 +78,4 @@ public boolean equals(Object obj) {
             && Objects.equals(script, ((GroupByKey) obj).script)
             && Objects.equals(direction, ((GroupByKey) obj).direction);
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
index 43b1045ff7092..9a784b7b112ff 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java
@@ -183,7 +183,7 @@ private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute
         List<FieldExtraction> nestedRefs = new ArrayList<>();
 
         String name = aliasName(attr);
-        String format = attr.field().getDataType() == DataType.DATE ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT;
+        String format = attr.field().getDataType() == DataType.DATETIME ? "epoch_millis" : DocValueFieldsContext.USE_DEFAULT_FORMAT;
         Query q = rewriteToContainNestedField(query, attr.source(), attr.nestedParent().name(), name, format, attr.field().isAggregatable());
 
@@ -362,4 +362,4 @@ public String toString() {
             throw new RuntimeException("error rendering", e);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java
index 7f799108d28ec..5e51b36fc3c32 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java
@@ -49,7 +49,7 @@ public void collectFields(SqlSourceBuilder sourceBuilder) {
             return;
         }
         if (docValue) {
-            String format = dataType == DataType.DATE ? "epoch_millis" : null;
+            String format = dataType == DataType.DATETIME ? "epoch_millis" : null;
             sourceBuilder.addDocField(name, format);
         } else {
             sourceBuilder.addSourceField(name);
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
index b8d55f22942eb..f233632d0f656 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java
@@ -44,7 +44,7 @@ public enum DataType {
     // since ODBC and JDBC interpret precision for Date as display size,
     // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone)
    // see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288
-    DATE(     JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
+    DATETIME( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true),
     //
     // specialized types
     //
@@ -102,9 +102,9 @@ public enum DataType {
         odbcToEs.put("SQL_LONGVARBINARY", BINARY);
 
         // Date
-        odbcToEs.put("SQL_DATE", DATE);
-        odbcToEs.put("SQL_TIME", DATE);
-        odbcToEs.put("SQL_TIMESTAMP", DATE);
+        odbcToEs.put("SQL_DATE", DATETIME);
+        odbcToEs.put("SQL_TIME", DATETIME);
+        odbcToEs.put("SQL_TIMESTAMP", DATETIME);
 
         // Intervals
         odbcToEs.put("SQL_INTERVAL_HOUR_TO_MINUTE", INTERVAL_HOUR_TO_MINUTE);
@@ -225,10 +225,14 @@ public static DataType fromOdbcType(String odbcType) {
      * For any dataType DataType.fromTypeName(dataType.esType) == dataType
      */
     public static DataType fromTypeName(String esType) {
+        String uppercase = esType.toUpperCase(Locale.ROOT);
+        if (uppercase.equals("DATE")) {
+            return DataType.DATETIME;
+        }
         try {
-            return DataType.valueOf(esType.toUpperCase(Locale.ROOT));
+            return DataType.valueOf(uppercase);
         } catch (IllegalArgumentException ex) {
             return DataType.UNSUPPORTED;
         }
     }
-}
\ No newline at end of file
+}
(from.isString()) { return Conversion.STRING_TO_LONG; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_LONG; } return null; @@ -201,7 +201,7 @@ private static Conversion conversionToInt(DataType from) { if (from.isString()) { return Conversion.STRING_TO_INT; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_INT; } return null; @@ -220,7 +220,7 @@ private static Conversion conversionToShort(DataType from) { if (from.isString()) { return Conversion.STRING_TO_SHORT; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_SHORT; } return null; @@ -239,7 +239,7 @@ private static Conversion conversionToByte(DataType from) { if (from.isString()) { return Conversion.STRING_TO_BYTE; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_BYTE; } return null; @@ -258,7 +258,7 @@ private static Conversion conversionToFloat(DataType from) { if (from.isString()) { return Conversion.STRING_TO_FLOAT; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_FLOAT; } return null; @@ -277,13 +277,13 @@ private static Conversion conversionToDouble(DataType from) { if (from.isString()) { return Conversion.STRING_TO_DOUBLE; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_DOUBLE; } return null; } - private static Conversion conversionToDate(DataType from) { + private static Conversion conversionToDateTime(DataType from) { if (from.isRational()) { return Conversion.RATIONAL_TO_DATE; } @@ -306,7 +306,7 @@ private static Conversion conversionToBoolean(DataType from) { if (from.isString()) { return Conversion.STRING_TO_BOOLEAN; } - if (from == DATE) { + if (from == DATETIME) { return Conversion.DATE_TO_BOOLEAN; } return null; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java index b865f541634b1..5a3fa235e9a73 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypes.java @@ -12,7 +12,7 @@ import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.sql.type.DataType.BYTE; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; @@ -68,7 +68,7 @@ public static DataType fromJava(Object value) { return SHORT; } if (value instanceof ZonedDateTime) { - return DATE; + return DATETIME; } if (value instanceof String || value instanceof Character) { return KEYWORD; @@ -166,7 +166,7 @@ private static String intervalUnit(char unitChar) { // https://docs.microsoft.com/en-us/sql/relational-databases/native-client-odbc-date-time/metadata-catalog // https://github.com/elastic/elasticsearch/issues/30386 public static Integer metaSqlDataType(DataType t) { - if (t == DATE) { + if (t == DATETIME) { // ODBC SQL_DATETME return Integer.valueOf(9); } @@ -177,7 +177,7 @@ public static Integer metaSqlDataType(DataType t) { // https://github.com/elastic/elasticsearch/issues/30386 // https://docs.microsoft.com/en-us/sql/odbc/reference/syntax/sqlgettypeinfo-function?view=sql-server-2017 public static Integer metaSqlDateTimeSub(DataType t) { - if (t == DATE) { + if (t 
== DATETIME) { // ODBC SQL_CODE_TIMESTAMP return Integer.valueOf(3); } @@ -188,7 +188,7 @@ public static Integer metaSqlDateTimeSub(DataType t) { // https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/decimal-digits?view=sql-server-2017 public static Short metaSqlMinimumScale(DataType t) { // TODO: return info for HALF/SCALED_FLOATS (should be based on field not type) - if (t == DATE) { + if (t == DATETIME) { return Short.valueOf((short) 3); } if (t.isInteger()) { @@ -203,7 +203,7 @@ public static Short metaSqlMinimumScale(DataType t) { public static Short metaSqlMaximumScale(DataType t) { // TODO: return info for HALF/SCALED_FLOATS (should be based on field not type) - if (t == DATE) { + if (t == DATETIME) { return Short.valueOf((short) 3); } if (t.isInteger()) { @@ -223,4 +223,4 @@ public static Integer metaSqlRadix(DataType t) { // null means radix is not applicable for the given type. return t.isInteger() ? Integer.valueOf(10) : (t.isRational() ? Integer.valueOf(2) : null); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java index 04926db5407f5..71924adab5581 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DateEsField.java @@ -21,7 +21,7 @@ public class DateEsField extends EsField { private final List formats; public DateEsField(String name, Map properties, boolean hasDocValues, String... formats) { - super(name, DataType.DATE, properties, hasDocValues); + super(name, DataType.DATETIME, properties, hasDocValues); this.formats = CollectionUtils.isEmpty(formats) ? DEFAULT_FORMAT : Arrays.asList(formats); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java index f367f39530dae..0af0a5f322cc6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Types.java @@ -86,7 +86,7 @@ private static void walkMapping(String name, Object value, Map boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null)); field = new KeywordEsField(name, properties, docValues, length, normalized); break; - case DATE: + case DATETIME: Object fmt = content.get("format"); if (fmt != null) { field = new DateEsField(name, properties, docValues, Strings.delimitedListToStringArray(fmt.toString(), "||")); @@ -118,4 +118,4 @@ private static boolean boolSetting(Object value, boolean defaultValue) { private static int intSetting(Object value, int defaultValue) { return value == null ? 
defaultValue : Integer.parseInt(value.toString()); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index e394296829b09..e45da9d08fee9 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -373,7 +373,7 @@ public void testInNestedWithDifferentDataTypesFromLeftValue_WhereClause() { } public void testNotSupportedAggregateOnDate() { - assertEquals("1:8: [AVG(date)] argument must be [numeric], found value [date] type [date]", + assertEquals("1:8: [AVG(date)] argument must be [numeric], found value [date] type [datetime]", error("SELECT AVG(date) FROM test")); } @@ -510,14 +510,14 @@ public void testAggsInWhere() { public void testHistogramInFilter() { assertEquals("1:63: Cannot filter on grouping function [HISTOGRAM(date, INTERVAL 1 MONTH)], use its argument instead", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test WHERE " - + "HISTOGRAM(date, INTERVAL 1 MONTH) > CAST('2000-01-01' AS DATE) GROUP BY h")); + + "HISTOGRAM(date, INTERVAL 1 MONTH) > CAST('2000-01-01' AS DATETIME) GROUP BY h")); } // related https://github.com/elastic/elasticsearch/issues/36853 public void testHistogramInHaving() { assertEquals("1:75: Cannot filter on grouping function [h], use its argument instead", error("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test GROUP BY h HAVING " - + "h > CAST('2000-01-01' AS DATE)")); + + "h > CAST('2000-01-01' AS DATETIME)")); } public void testGroupByScalarOnTopOfGrouping() { @@ -548,3 +548,4 @@ public void testErrorMessageForPercentileRankWithSecondArgBasedOnAField() { e.getMessage()); } } + diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java index b53d00cfbb71d..bb328b2d8ffdc 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -190,7 +190,7 @@ private static boolean isSearchable(DataType type) { } private static boolean isAggregatable(DataType type) { - return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATE; + return type.isNumeric() || type == DataType.KEYWORD || type == DataType.DATETIME; } private static class UpdateableFieldCapabilities extends FieldCapabilities { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 4f562e82b5c21..7677878ddac4f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -144,7 +144,7 @@ public void testGetDate() { SearchHit hit = new SearchHit(1); DocumentField field = new DocumentField("my_date_field", documentFieldValues); hit.fields(singletonMap("my_date_field", field)); - FieldHitExtractor 
extractor = new FieldHitExtractor("my_date_field", DataType.DATE, true); + FieldHitExtractor extractor = new FieldHitExtractor("my_date_field", DataType.DATETIME, true); assertEquals(DateUtils.of(millis), extractor.extract(hit)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java index 2a7af2916373e..7168716b529ea 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java @@ -28,6 +28,6 @@ private Object extract(Object value, ZoneId zoneId) { } private DayOfYear build(Object value, ZoneId zoneId) { - return new DayOfYear(Source.EMPTY, new Literal(Source.EMPTY, value, DataType.DATE), zoneId); + return new DayOfYear(Source.EMPTY, new Literal(Source.EMPTY, value, DataType.DATETIME), zoneId); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java index e329ad248108c..696f999b0b051 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java @@ -77,7 +77,7 @@ public void testAddDayTimeIntervals() { assertEquals(interval(Duration.ofDays(1).plusHours(2), INTERVAL_DAY_TO_HOUR), L(x)); } - public void testAddYearMonthIntervalToDate() { + public void testAddYearMonthIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); @@ -86,7 +86,7 @@ public void testAddYearMonthIntervalToDate() { assertEquals(L(now.plus(t)), L(x)); } - public void testAddDayTimeIntervalToDate() { + public void testAddDayTimeIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); @@ -95,7 +95,7 @@ public void testAddDayTimeIntervalToDate() { assertEquals(L(now.plus(t)), L(x)); } - public void testAddDayTimeIntervalToDateReverse() { + public void testAddDayTimeIntervalToDateTimeReverse() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); @@ -124,7 +124,7 @@ public void testSubDayTimeIntervals() { assertEquals(interval(Duration.ofDays(1).plusHours(8), INTERVAL_DAY_TO_HOUR), L(x)); } - public void testSubYearMonthIntervalToDate() { + public void testSubYearMonthIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); @@ -133,7 +133,7 @@ public void testSubYearMonthIntervalToDate() { assertEquals(L(now.minus(t)), L(x)); } - public void testSubYearMonthIntervalToDateIllegal() { + public void testSubYearMonthIntervalToDateTimeIllegal() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); @@ -148,7 +148,7 @@ public void testSubNumberFromIntervalIllegal() { assertEquals("Cannot compute [-] between [IntervalDayTime] 
[Integer]", expect.getMessage()); } - public void testSubDayTimeIntervalToDate() { + public void testSubDayTimeIntervalToDateTime() { ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 700097c46163a..6873e4a107fb6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -330,7 +330,7 @@ public void testConstantFoldingLikes() { } public void testConstantFoldingDatetime() { - Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATE); + Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATETIME); assertEquals(2018, foldFunction(new Year(EMPTY, cast, UTC))); assertEquals(1, foldFunction(new MonthOfYear(EMPTY, cast, UTC))); assertEquals(19, foldFunction(new DayOfMonth(EMPTY, cast, UTC))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java index 5969f8e5ed2cd..f3bf9fc03e777 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java @@ -170,7 +170,7 @@ public void testFunctionWithFunctionWithArgAndParams() { public void testDateLiteral() { Literal l = dateLiteral("2012-01-01"); - assertThat(l.dataType(), is(DataType.DATE)); + assertThat(l.dataType(), is(DataType.DATETIME)); } public void testDateLiteralValidation() { @@ -192,7 +192,7 @@ public void testTimeLiteralValidation() { public void testTimestampLiteral() { Literal l = timestampLiteral("2012-01-01 10:01:02.3456"); - assertThat(l.dataType(), is(DataType.DATE)); + assertThat(l.dataType(), is(DataType.DATETIME)); } public void testTimestampLiteralValidation() { @@ -236,4 +236,4 @@ public void testLikeEscape() { LikePattern pattern = likeEscape("|%tring", "|"); assertThat(pattern.escape(), is('|')); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java index 0462956bf851f..6ed46b74d4512 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java @@ -61,7 +61,7 @@ public void testSysTypes() throws Exception { Command cmd = sql("SYS TYPES").v1(); List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", - "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATETIME", "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", @@ -160,4 +160,4 @@ private void runSysColumns(String 
commandVariation) throws Exception { }, ex -> fail(ex.getMessage()))); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java index 7adeddc9ebec3..92f734e539780 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypesTests.java @@ -44,7 +44,7 @@ public void testSysTypes() throws Exception { Command cmd = sql("SYS TYPES").v1(); List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", - "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATETIME", "INTERVAL_YEAR", "INTERVAL_MONTH", "INTERVAL_DAY", "INTERVAL_HOUR", "INTERVAL_MINUTE", "INTERVAL_SECOND", "INTERVAL_YEAR_TO_MONTH", "INTERVAL_DAY_TO_HOUR", "INTERVAL_DAY_TO_MINUTE", "INTERVAL_DAY_TO_SECOND", "INTERVAL_HOUR_TO_MINUTE", "INTERVAL_HOUR_TO_SECOND", "INTERVAL_MINUTE_TO_SECOND", @@ -108,4 +108,4 @@ public void testSysTypesMultipleMatches() throws Exception { assertEquals("IP", r.column(0)); }, ex -> fail(ex.getMessage()))); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index 214d935251994..44f50b53b5aa3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -292,7 +292,7 @@ public void testGroupKeyTypes_IP() { assertThat(ee.output().get(1).toString(), startsWith("a{s->")); } - public void testGroupKeyTypes_Date() { + public void testGroupKeyTypes_DateTime() { PhysicalPlan p = plan("SELECT count(*), date + INTERVAL '1-2' YEAR TO MONTH AS a FROM test GROUP BY a"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec ee = (EsQueryExec) p; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 23beae2fd586e..8ee94194845a5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -170,7 +170,7 @@ public void testDateRangeLiteral() { } public void testDateRangeCast() { - LogicalPlan p = plan("SELECT some.string FROM test WHERE date > CAST('1969-05-13T12:34:56Z' AS DATE)"); + LogicalPlan p = plan("SELECT some.string FROM test WHERE date > CAST('1969-05-13T12:34:56Z' AS DATETIME)"); assertTrue(p instanceof Project); p = ((Project) p).child(); assertTrue(p instanceof Filter); @@ -480,7 +480,7 @@ public void testGroupByHistogram() { assertEquals("+2-0", h.interval().fold().toString()); Expression field = h.field(); assertEquals(FieldAttribute.class, field.getClass()); - assertEquals(DataType.DATE, field.dataType()); + assertEquals(DataType.DATETIME, field.dataType()); } public void testCountAndCountDistinctFolding() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index a44ce44d0f904..ac744c3365a54 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -18,7 +18,7 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; import static org.elasticsearch.xpack.sql.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.sql.type.DataType.BYTE; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.DOUBLE; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; @@ -41,7 +41,7 @@ public void testConversionToString() { assertNull(conversion.convert(null)); assertEquals("10.0", conversion.convert(10.0)); - conversion = conversionFor(DATE, KEYWORD); + conversion = conversionFor(DATETIME, KEYWORD); assertNull(conversion.convert(null)); assertEquals("1970-01-01T00:00:00.000Z", conversion.convert(dateTime(0))); } @@ -80,8 +80,8 @@ public void testConversionToLong() { assertEquals("cannot cast [0xff] to [Long]", e.getMessage()); } - public void testConversionToDate() { - DataType to = DATE; + public void testConversionToDateTime() { + DataType to = DATETIME; { Conversion conversion = conversionFor(DOUBLE, to); assertNull(conversion.convert(null)); @@ -112,8 +112,8 @@ public void testConversionToDate() { // double check back and forth conversion ZonedDateTime dt = TestUtils.now(); - Conversion forward = conversionFor(DATE, KEYWORD); - Conversion back = conversionFor(KEYWORD, DATE); + Conversion forward = conversionFor(DATETIME, KEYWORD); + Conversion back = conversionFor(KEYWORD, DATETIME); assertEquals(dt, back.convert(forward.convert(dt))); Exception e = expectThrows(SqlIllegalArgumentException.class, () -> conversion.convert("0xff")); assertEquals("cannot cast [0xff] to [Date]:Invalid format: \"0xff\" is malformed at \"xff\"", e.getMessage()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java index ff6bf4611c827..7b38718dad794 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypesTests.java @@ -9,7 +9,7 @@ import java.util.EnumSet; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.FLOAT; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY; import static org.elasticsearch.xpack.sql.type.DataType.INTERVAL_DAY_TO_HOUR; @@ -37,32 +37,32 @@ public class DataTypesTests extends ESTestCase { public void testMetaDataType() { - assertEquals(Integer.valueOf(9), metaSqlDataType(DATE)); - DataType t = randomDataTypeNoDate(); + assertEquals(Integer.valueOf(9), metaSqlDataType(DATETIME)); + DataType t = randomDataTypeNoDateTime(); assertEquals(t.sqlType.getVendorTypeNumber(), metaSqlDataType(t)); } public void testMetaDateTypeSub() { - assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATE)); - assertEquals(Integer.valueOf(0), 
metaSqlDateTimeSub(randomDataTypeNoDate())); + assertEquals(Integer.valueOf(3), metaSqlDateTimeSub(DATETIME)); + assertEquals(Integer.valueOf(0), metaSqlDateTimeSub(randomDataTypeNoDateTime())); } public void testMetaMinimumScale() { - assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATE)); + assertEquals(Short.valueOf((short) 3), metaSqlMinimumScale(DATETIME)); assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(LONG)); assertEquals(Short.valueOf((short) 0), metaSqlMinimumScale(FLOAT)); assertNull(metaSqlMinimumScale(KEYWORD)); } public void testMetaMaximumScale() { - assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATE)); + assertEquals(Short.valueOf((short) 3), metaSqlMaximumScale(DATETIME)); assertEquals(Short.valueOf((short) 0), metaSqlMaximumScale(LONG)); assertEquals(Short.valueOf((short) FLOAT.defaultPrecision), metaSqlMaximumScale(FLOAT)); assertNull(metaSqlMaximumScale(KEYWORD)); } public void testMetaRadix() { - assertNull(metaSqlRadix(DATE)); + assertNull(metaSqlRadix(DATETIME)); assertNull(metaSqlRadix(KEYWORD)); assertEquals(Integer.valueOf(10), metaSqlRadix(LONG)); assertEquals(Integer.valueOf(2), metaSqlRadix(FLOAT)); @@ -108,7 +108,7 @@ public void testIncompatibleInterval() throws Exception { assertNull(compatibleInterval(INTERVAL_MINUTE_TO_SECOND, INTERVAL_MONTH)); } - private DataType randomDataTypeNoDate() { - return randomValueOtherThan(DataType.DATE, () -> randomFrom(DataType.values())); + private DataType randomDataTypeNoDateTime() { + return randomValueOtherThan(DataType.DATETIME, () -> randomFrom(DataType.values())); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index 8e02e82eb831f..fd7b88330d3c3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -14,7 +14,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; -import static org.elasticsearch.xpack.sql.type.DataType.DATE; +import static org.elasticsearch.xpack.sql.type.DataType.DATETIME; import static org.elasticsearch.xpack.sql.type.DataType.INTEGER; import static org.elasticsearch.xpack.sql.type.DataType.KEYWORD; import static org.elasticsearch.xpack.sql.type.DataType.NESTED; @@ -81,7 +81,7 @@ public void testDateField() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); - assertThat(field.getDataType(), is(DATE)); + assertThat(field.getDataType(), is(DATETIME)); assertThat(field.isAggregatable(), is(true)); assertThat(field.getPrecision(), is(24)); @@ -95,7 +95,7 @@ public void testDateNoFormat() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); - assertThat(field.getDataType(), is(DATE)); + assertThat(field.getDataType(), is(DATETIME)); assertThat(field.isAggregatable(), is(true)); DateEsField dfield = (DateEsField) field; // default types @@ -107,7 +107,7 @@ public void testDateMulti() { assertThat(mapping.size(), is(1)); EsField field = mapping.get("date"); - assertThat(field.getDataType(), is(DATE)); + assertThat(field.getDataType(), is(DATETIME)); assertThat(field.isAggregatable(), is(true)); DateEsField dfield = (DateEsField) field; // default types @@ -175,7 +175,7 @@ public void testNestedDoc() { Map children = field.getProperties(); assertThat(children.size(), is(4)); 
assertThat(children.get("dep_name").getDataType(), is(TEXT)); - assertThat(children.get("start_date").getDataType(), is(DATE)); + assertThat(children.get("start_date").getDataType(), is(DATETIME)); } public void testGeoField() { @@ -208,4 +208,4 @@ public static Map loadMapping(String name, boolean ordered) { assertNotNull("Could not find mapping resource:" + name, stream); return Types.fromEs(XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, ordered)); } -} \ No newline at end of file +} From 676e1b1a13588e2d13777242a6ceddecd2f2def4 Mon Sep 17 00:00:00 2001 From: Torgeir Thoresen Date: Thu, 17 Jan 2019 10:22:49 +0100 Subject: [PATCH 05/11] Fix erroneous docstrings for abstract bulk by scroll request (#37517) --- .../index/reindex/AbstractBulkByScrollRequest.java | 4 ++-- .../index/reindex/AbstractBulkByScrollRequestBuilder.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index 4aa9bc5ce146c..265ef1cbf481a 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -185,14 +185,14 @@ public Self setSize(int size) { } /** - * Should version conflicts cause aborts? Defaults to false. + * Whether or not version conflicts cause the action to abort. */ public boolean isAbortOnVersionConflict() { return abortOnVersionConflict; } /** - * Should version conflicts cause aborts? Defaults to false. + * Set whether or not version conflicts cause the action to abort. */ public Self setAbortOnVersionConflict(boolean abortOnVersionConflict) { this.abortOnVersionConflict = abortOnVersionConflict; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java index 227814e24302e..a14ef850c5079 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java @@ -75,7 +75,7 @@ public Self size(int size) { } /** - * Should we version conflicts cause the action to abort? + * Set whether or not version conflicts cause the action to abort. 
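+     * Defaults to {@code false}.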
     */
    public Self abortOnVersionConflict(boolean abortOnVersionConflict) {
        request.setAbortOnVersionConflict(abortOnVersionConflict);

From da799306a8c471b02ab4bd8e6e0185933952a7a9 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Thu, 17 Jan 2019 11:51:17 +0100
Subject: [PATCH 06/11] Decreased timeout in test

Relates to #37378
---
 .../action/admin/cluster/state/ClusterStateApiTests.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java
index fb823d3657e19..e061e7a08dd89 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateApiTests.java
@@ -62,7 +62,7 @@ public void testWaitForMetaDataVersion() throws Exception {
         // Verify that the timed out property has been set"
         metadataVersion = response.getState().getMetaData().version();
         clusterStateRequest.waitForMetaDataVersion(metadataVersion + 1);
-        clusterStateRequest.waitForTimeout(TimeValue.timeValueSeconds(1)); // Fail fast
+        clusterStateRequest.waitForTimeout(TimeValue.timeValueMillis(500)); // Fail fast
         ActionFuture<ClusterStateResponse> future3 = client().admin().cluster().state(clusterStateRequest);
         assertBusy(() -> {
             assertThat(future3.isDone(), is(true));

From 6fe2d6da0391a1da861fc55222c39693388a529b Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Thu, 17 Jan 2019 13:54:48 +0100
Subject: [PATCH 07/11] Mute TransportClientNodesServiceTests#testListenerFailures

Relates to #37567
---
 .../client/transport/TransportClientNodesServiceTests.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
index 208629c169a67..3100dcbcc66a3 100644
--- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java
@@ -220,6 +220,7 @@ public void close() {
         }
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/37567")
     public void testListenerFailures() throws InterruptedException {
         int iters = iterations(10, 100);
         for (int i = 0; i < iters; i++) {

Date: Thu, 17 Jan 2019 13:14:06 +0100
Subject: [PATCH 08/11] Moved ccr integration to the package with other ccr
 integration tests.

---
 .../org/elasticsearch/xpack/ccr/{action => }/FollowStatsIT.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
 rename x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/{action => }/FollowStatsIT.java (99%)

diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/FollowStatsIT.java
similarity index 99%
rename from x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java
rename to x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/FollowStatsIT.java
index bf6f080099088..409746f9d851b 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/FollowStatsIT.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/FollowStatsIT.java
@@ -4,7 +4,7 @@
 * you may not use this file except in compliance with the Elastic License.
 */

-package org.elasticsearch.xpack.ccr.action;
+package org.elasticsearch.xpack.ccr;

 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;

From d9fa4e4adaf354d802523e5cf396cf2bfc7f40b4 Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Thu, 17 Jan 2019 13:59:09 +0100
Subject: [PATCH 09/11] Fix testRelocateWhileContinuouslyIndexingAndWaitingForRefresh (#37560)

This test failed because the refresh at the end of the test is not
guaranteed to run before the indexing is completed, and therefore
there's no guarantee that the refresh will free all operations. This
triggers an assertion failure in the test clean-up, which asserts that
there are no more pending operations.
---
 .../elasticsearch/recovery/RelocationIT.java | 20 +++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java
index 45f0fce3b8143..fb455f37d76f3 100644
--- a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java
+++ b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java
@@ -27,6 +27,7 @@
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.index.IndexResponse;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.client.Client;
@@ -552,7 +553,7 @@ public void testRelocateWhileWaitingForRefresh() {
         assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
     }

-    public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() {
+    public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception {
         logger.info("--> starting [node1] ...");
         final String node1 = internalCluster().startNode();

@@ -570,9 +571,11 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() {
         logger.info("--> flush so we have an actual index");
         client().admin().indices().prepareFlush().execute().actionGet();
         logger.info("--> index more docs so we have something in the translog");
+        final List<ActionFuture<IndexResponse>> pendingIndexResponses = new ArrayList<>();
         for (int i = 10; i < 20; i++) {
-            client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
-                .setSource("field", "value" + i).execute();
+            pendingIndexResponses.add(client().prepareIndex("test", "type", Integer.toString(i))
+                .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
+                .setSource("field", "value" + i).execute());
         }
         logger.info("--> start another node");

@@ -587,8 +590,9 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() {
             .execute();
         logger.info("--> index 100 docs while relocating");
         for (int i = 20; i < 120; i++) {
-            client().prepareIndex("test", "type", Integer.toString(i)).setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
-                .setSource("field", "value" + i).execute();
+            pendingIndexResponses.add(client().prepareIndex("test", "type", Integer.toString(i))
+                .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
+                .setSource("field", "value" + i).execute());
         }
         relocationListener.actionGet();
         clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID)
@@ -596,7
+600,11 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() { assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); logger.info("--> verifying count"); - client().admin().indices().prepareRefresh().execute().actionGet(); + assertBusy(() -> { + client().admin().indices().prepareRefresh().execute().actionGet(); + assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone)); + }, 1, TimeUnit.MINUTES); + assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L)); } From b85bfd3e1793c71330bb6d2e186d3e6f6edfb74e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Jan 2019 14:04:41 +0100 Subject: [PATCH 10/11] Added fatal_exception field for ccr stats in monitoring mapping. (#37563) --- .../collector/ccr/FollowStatsMonitoringDocTests.java | 7 ++++++- .../plugin/core/src/main/resources/monitoring-es.json | 11 +++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 410d573e1b4c0..33affe45fc46c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -230,7 +230,7 @@ public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { 10, fetchExceptions, 2, - null); + new ElasticsearchException("fatal error")); XContentBuilder builder = jsonBuilder(); builder.value(status); Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); @@ -266,6 +266,11 @@ public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { assertThat(exceptionFieldMapping.size(), equalTo(2)); assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); + } else if (fieldName.equals("fatal_exception")) { + assertThat(fieldType, equalTo("object")); + assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(2)); + assertThat(XContentMapValues.extractValue("properties.type.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.reason.type", fieldMapping), equalTo("text")); } else { fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 872d3df43a81f..426262cd48c03 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -1028,6 +1028,17 @@ }, "time_since_last_read_millis": { "type": "long" + }, + "fatal_exception": { + "type": "object", + "properties": { + "type" : { + "type": "keyword" + }, + "reason": { + "type": "text" + } + } } } }, From 4351a5e5375237b8259fbcbd3ff4d7c59cc2d215 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Thu, 17 Jan 2019 15:10:28 +0100 Subject: [PATCH 11/11] Allow field types to optimize phrase prefix queries (#37436) This change adds a way to customize how phrase prefix queries should be created on field types. 
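As a rough illustration (a hedged sketch rather than code from this patch:
the ExampleUsage class is made up, while MappedFieldType#phrasePrefixQuery is
the hook introduced in the diff below), a caller can hand the analyzed token
stream to the field type and let it pick the query shape:

    import java.io.IOException;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.index.mapper.MappedFieldType;

    class ExampleUsage {
        /** Delegates phrase prefix construction to the field type. */
        static Query phrasePrefix(MappedFieldType fieldType, Analyzer analyzer,
                                  String text, int slop, int maxExpansions) throws IOException {
            try (TokenStream stream = analyzer.tokenStream(fieldType.name(), text)) {
                // A text field with index_prefixes can return an optimized span query here,
                // a plain MultiPhrasePrefixQuery otherwise; field types without positions
                // reject the call.
                return fieldType.phrasePrefixQuery(stream, slop, maxExpansions);
            }
        }
    }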
The match phrase prefix query is exposed in field types in order to allow
optimizations based on the options set on the field. For instance, the text
field uses the configured prefix field (if available) to build a span-near
query that mixes the original field and the prefix field on the last
position. This change also contains a small refactoring of the
match/multi_match query that simplifies the interactions between the
builders.

Closes #31921
---
 .../AnnotatedTextFieldMapper.java | 72 +--
 .../lucene/search/MultiPhrasePrefixQuery.java | 12 +-
 .../SpanBooleanQueryRewriteWithMaxClause.java | 119 ++++
 .../index/mapper/MappedFieldType.java | 16 +-
 .../index/mapper/TextFieldMapper.java | 191 ++++--
 .../query/SpanMultiTermQueryBuilder.java | 155 ++---
 .../index/search/MatchQuery.java | 548 +++++++++++-------
 .../index/search/MultiMatchQuery.java | 340 ++++-------
 .../CustomUnifiedHighlighterTests.java | 4 +-
 .../search/MultiPhrasePrefixQueryTests.java | 10 +-
 .../index/mapper/TextFieldMapperTests.java | 128 ++++
 .../MatchPhrasePrefixQueryBuilderTests.java | 16 +-
 .../index/query/MatchQueryBuilderTests.java | 8 +-
 .../query/MultiMatchQueryBuilderTests.java | 12 +-
 .../query/QueryStringQueryBuilderTests.java | 7 +-
 .../query/SpanMultiTermQueryBuilderTests.java | 137 ++---
 .../index/search/MultiMatchQueryTests.java | 22 +-
 17 files changed, 1042 insertions(+), 755 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java

diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java
index 79fefbc64d407..2aadfd2218590 100644
--- a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java
+++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java
@@ -27,17 +27,17 @@
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
-import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.MultiPhraseQuery;
 import org.apache.lucene.search.NormsFieldExistsQuery;
-import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
+import org.apache.lucene.search.spans.SpanQuery;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -603,62 +603,26 @@ public Query existsQuery(QueryShardContext context) {
     }

     @Override
-    public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException {
-        PhraseQuery.Builder builder = new PhraseQuery.Builder();
-        builder.setSlop(slop);
-
-        TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
-
PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); - int position = -1; - - stream.reset(); - while (stream.incrementToken()) { - if (enablePosIncrements) { - position += posIncrAtt.getPositionIncrement(); - } - else { - position += 1; - } - builder.add(new Term(field, termAtt.getBytesRef()), position); - } - - return builder.build(); + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { + SpanMultiTermQueryWrapper spanMulti = + new SpanMultiTermQueryWrapper<>(new PrefixQuery(new Term(name(), indexedValueForSearch(value)))); + spanMulti.setRewriteMethod(method); + return spanMulti; } @Override - public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { - - MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); - mpqb.setSlop(slop); - - TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); - - PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); - int position = -1; - - List multiTerms = new ArrayList<>(); - stream.reset(); - while (stream.incrementToken()) { - int positionIncrement = posIncrAtt.getPositionIncrement(); + public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + } - if (positionIncrement > 0 && multiTerms.size() > 0) { - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); - } - multiTerms.clear(); - } - position += positionIncrement; - multiTerms.add(new Term(field, termAtt.getBytesRef())); - } + @Override + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + return TextFieldMapper.createPhraseQuery(stream, name(), slop, enablePositionIncrements); + } - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); - } - return mpqb.build(); + @Override + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { + return TextFieldMapper.createPhrasePrefixQuery(stream, name(), slop, maxExpansions); } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index b8e1039b2df1d..57f60add714a1 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -39,16 +39,21 @@ import java.util.Iterator; import java.util.List; import java.util.ListIterator; +import java.util.Objects; public class MultiPhrasePrefixQuery extends Query { - private String field; + private final String field; private ArrayList termArrays = new ArrayList<>(); private ArrayList positions = new ArrayList<>(); private int maxExpansions = Integer.MAX_VALUE; private int slop = 0; + public MultiPhrasePrefixQuery(String field) { + this.field = Objects.requireNonNull(field); + } + /** * Sets the phrase slop for this query. 
* @@ -102,9 +107,6 @@ public void add(Term[] terms) { * @see org.apache.lucene.search.PhraseQuery.Builder#add(Term, int) */ public void add(Term[] terms, int position) { - if (termArrays.size() == 0) - field = terms[0].field(); - for (int i = 0; i < terms.length; i++) { if (terms[i].field() != field) { throw new IllegalArgumentException( @@ -212,7 +214,7 @@ private void getPrefixTerms(ObjectHashSet terms, final Term prefix, final @Override public final String toString(String f) { StringBuilder buffer = new StringBuilder(); - if (field == null || !field.equals(f)) { + if (field.equals(f) == false) { buffer.append(field); buffer.append(":"); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java new file mode 100644 index 0000000000000..e78770ed2a85a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -0,0 +1,119 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.lucene.search; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexReaderContext; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.util.BytesRef; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +/** + * A span rewrite method that extracts the first maxExpansions terms + * that match the {@link MultiTermQuery} in the terms dictionary. + * The rewrite throws an error if more than maxExpansions terms are found and hardLimit + * is set. 
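+ * If hardLimit is false, the expansion is instead truncated at maxExpansions terms.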
+ */ +public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrapper.SpanRewriteMethod { + private final int maxExpansions; + private final boolean hardLimit; + + public SpanBooleanQueryRewriteWithMaxClause() { + this(BooleanQuery.getMaxClauseCount(), true); + } + + public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { + this.maxExpansions = maxExpansions; + this.hardLimit = hardLimit; + } + + public int getMaxExpansions() { + return maxExpansions; + } + + public boolean isHardLimit() { + return hardLimit; + } + + @Override + public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { + @Override + public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + Collection queries = collectTerms(reader, query); + if (queries.size() == 0) { + return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); + } else if (queries.size() == 1) { + return queries.iterator().next(); + } else { + return new SpanOrQuery(queries.toArray(new SpanQuery[0])); + } + } + + private Collection collectTerms(IndexReader reader, MultiTermQuery query) throws IOException { + Set queries = new HashSet<>(); + IndexReaderContext topReaderContext = reader.getContext(); + for (LeafReaderContext context : topReaderContext.leaves()) { + final Terms terms = context.reader().terms(query.getField()); + if (terms == null) { + // field does not exist + continue; + } + + final TermsEnum termsEnum = getTermsEnum(query, terms, null); + assert termsEnum != null; + + if (termsEnum == TermsEnum.EMPTY) + continue; + + BytesRef bytes; + while ((bytes = termsEnum.next()) != null) { + if (queries.size() >= maxExpansions) { + if (hardLimit) { + throw new RuntimeException("[" + query.toString() + " ] " + + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " + BooleanQuery.getMaxClauseCount() + "]"); + } else { + return queries; + } + } + queries.add(new SpanTermQuery(new Term(query.getField(), bytes))); + } + } + return queries; + } + }; + return (SpanQuery) delegate.rewrite(reader, query); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 741b2300a4678..f785e01125f69 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -35,6 +35,8 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.intervals.IntervalsSource; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; @@ -365,16 +367,26 @@ public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nu public abstract Query existsQuery(QueryShardContext context); - public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } - public 
Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { throw new IllegalArgumentException("Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { + throw new IllegalArgumentException("Can only use phrase prefix queries on text fields - not on [" + name + + "] which is of type [" + typeName() + "]"); + } + + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { + throw new IllegalArgumentException("Can only use span prefix queries on text fields - not on [" + name + + "] which is of type [" + typeName() + "]"); + } + /** * Create an {@link IntervalsSource} to be used for proximity queries */ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 1b25c7b9866f7..e5fc470e130bc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -40,14 +40,23 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.intervals.IntervalsSource; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -60,6 +69,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -598,6 +608,23 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, Quer return tq; } + @Override + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { + failIfNotIndexed(); + if (prefixFieldType != null + && value.length() >= prefixFieldType.minChars + && value.length() <= prefixFieldType.maxChars + && prefixFieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) { + + return new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), indexedValueForSearch(value))), name()); + } else { + SpanMultiTermQueryWrapper spanMulti = + new SpanMultiTermQueryWrapper<>(new PrefixQuery(new Term(name(), indexedValueForSearch(value)))); 
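+            // No usable prefix field for this value: rewrite the prefix query itself into
+            // span terms, with the supplied rewrite method bounding the number of expansions.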
+ spanMulti.setRewriteMethod(method); + return spanMulti; + } + } + @Override public Query existsQuery(QueryShardContext context) { if (omitNorms()) { @@ -617,9 +644,9 @@ public IntervalsSource intervals(String text, int maxGaps, boolean ordered, Name } @Override - public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { - - if (indexPhrases && slop == 0 && hasGaps(cache(stream)) == false) { + public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { + String field = name(); + if (indexPhrases && slop == 0 && hasGaps(stream) == false) { stream = new FixedShingleFilter(stream, 2); field = field + FAST_PHRASE_SUFFIX; } @@ -645,54 +672,85 @@ public Query phraseQuery(String field, TokenStream stream, int slop, boolean ena } @Override - public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { - - if (indexPhrases && slop == 0 && hasGaps(cache(stream)) == false) { + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + String field = name(); + if (indexPhrases && slop == 0 && hasGaps(stream) == false) { stream = new FixedShingleFilter(stream, 2); field = field + FAST_PHRASE_SUFFIX; } + return createPhraseQuery(stream, field, slop, enablePositionIncrements); + } - MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); - mpqb.setSlop(slop); - - TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + @Override + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { + return analyzePhrasePrefix(stream, slop, maxExpansions); + } - PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); - int position = -1; + private Query analyzePhrasePrefix(TokenStream stream, int slop, int maxExpansions) throws IOException { + final MultiPhrasePrefixQuery query = createPhrasePrefixQuery(stream, name(), slop, maxExpansions); - List multiTerms = new ArrayList<>(); - stream.reset(); - while (stream.incrementToken()) { - int positionIncrement = posIncrAtt.getPositionIncrement(); + if (slop > 0 + || prefixFieldType == null + || prefixFieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { + return query; + } - if (positionIncrement > 0 && multiTerms.size() > 0) { - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); - } - multiTerms.clear(); + int lastPos = query.getTerms().length - 1; + final Term[][] terms = query.getTerms(); + final int[] positions = query.getPositions(); + for (Term term : terms[lastPos]) { + String value = term.text(); + if (value.length() < prefixFieldType.minChars || value.length() > prefixFieldType.maxChars) { + return query; } - position += positionIncrement; - multiTerms.add(new Term(field, termAtt.getBytesRef())); } - if (enablePositionIncrements) { - mpqb.add(multiTerms.toArray(new Term[0]), position); - } else { - mpqb.add(multiTerms.toArray(new Term[0])); + if (terms.length == 1) { + Term[] newTerms = Arrays.stream(terms[0]) + .map(term -> new Term(prefixFieldType.name(), term.bytes())) + .toArray(Term[]::new); + return new SynonymQuery(newTerms); } - return mpqb.build(); - } - private static CachingTokenFilter cache(TokenStream in) { - if (in instanceof CachingTokenFilter) { - return (CachingTokenFilter) 
in; + SpanNearQuery.Builder spanQuery = new SpanNearQuery.Builder(name(), true); + spanQuery.setSlop(slop); + int previousPos = -1; + for (int i = 0; i < terms.length; i++) { + Term[] posTerms = terms[i]; + int posInc = positions[i] - previousPos; + previousPos = positions[i]; + if (posInc > 1) { + spanQuery.addGap(posInc - 1); + } + if (i == lastPos) { + if (posTerms.length == 1) { + FieldMaskingSpanQuery fieldMask = + new FieldMaskingSpanQuery(new SpanTermQuery(new Term(prefixFieldType.name(), posTerms[0].bytes())), name()); + spanQuery.addClause(fieldMask); + } else { + SpanQuery[] queries = Arrays.stream(posTerms) + .map(term -> new FieldMaskingSpanQuery( + new SpanTermQuery(new Term(prefixFieldType.name(), term.bytes())), name()) + ) + .toArray(SpanQuery[]::new); + spanQuery.addClause(new SpanOrQuery(queries)); + } + } else { + if (posTerms.length == 1) { + spanQuery.addClause(new SpanTermQuery(posTerms[0])); + } else { + SpanTermQuery[] queries = Arrays.stream(posTerms) + .map(SpanTermQuery::new) + .toArray(SpanTermQuery[]::new); + spanQuery.addClause(new SpanOrQuery(queries)); + } + } } - return new CachingTokenFilter(in); + return spanQuery.build(); } - private static boolean hasGaps(CachingTokenFilter stream) throws IOException { + private static boolean hasGaps(TokenStream stream) throws IOException { + assert stream instanceof CachingTokenFilter; PositionIncrementAttribute posIncAtt = stream.getAttribute(PositionIncrementAttribute.class); stream.reset(); while (stream.incrementToken()) { @@ -870,4 +928,65 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, builder.field("index_phrases", fieldType().indexPhrases); } } + + public static Query createPhraseQuery(TokenStream stream, String field, int slop, boolean enablePositionIncrements) throws IOException { + MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); + mpqb.setSlop(slop); + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + + PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + int position = -1; + + List multiTerms = new ArrayList<>(); + stream.reset(); + while (stream.incrementToken()) { + int positionIncrement = posIncrAtt.getPositionIncrement(); + + if (positionIncrement > 0 && multiTerms.size() > 0) { + if (enablePositionIncrements) { + mpqb.add(multiTerms.toArray(new Term[0]), position); + } else { + mpqb.add(multiTerms.toArray(new Term[0])); + } + multiTerms.clear(); + } + position += positionIncrement; + multiTerms.add(new Term(field, termAtt.getBytesRef())); + } + + if (enablePositionIncrements) { + mpqb.add(multiTerms.toArray(new Term[0]), position); + } else { + mpqb.add(multiTerms.toArray(new Term[0])); + } + return mpqb.build(); + } + + public static MultiPhrasePrefixQuery createPhrasePrefixQuery(TokenStream stream, String field, + int slop, int maxExpansions) throws IOException { + MultiPhrasePrefixQuery builder = new MultiPhrasePrefixQuery(field); + builder.setSlop(slop); + builder.setMaxExpansions(maxExpansions); + + List currentTerms = new ArrayList<>(); + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + + stream.reset(); + int position = -1; + while (stream.incrementToken()) { + if (posIncrAtt.getPositionIncrement() != 0) { + if (currentTerms.isEmpty() == false) { + builder.add(currentTerms.toArray(new Term[0]), position); + } + position += 
posIncrAtt.getPositionIncrement(); + currentTerms.clear(); + } + currentTerms.add(new Term(field, termAtt.getBytesRef())); + } + builder.add(currentTerms.toArray(new Term[0]), position); + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 22fca7d1d0b8f..49e5e53e1ed91 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -18,31 +18,19 @@ */ package org.elasticsearch.index.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.TermStates; import org.apache.lucene.queries.SpanMatchNoDocsQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.ScoringRewrite; import org.apache.lucene.search.TopTermsRewrite; -import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -50,8 +38,6 @@ import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Objects; /** @@ -138,126 +124,53 @@ public static SpanMultiTermQueryBuilder fromXContent(XContentParser parser) thro return new SpanMultiTermQueryBuilder(subQuery).queryName(queryName).boost(boost); } - static class TopTermSpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrapper.SpanRewriteMethod { - private final long maxExpansions; - - TopTermSpanBooleanQueryRewriteWithMaxClause() { - this.maxExpansions = BooleanQuery.getMaxClauseCount(); - } - - @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { - final MultiTermQuery.RewriteMethod delegate = new ScoringRewrite>() { - @Override - protected List getTopLevelBuilder() { - return new ArrayList(); - } - - @Override - protected Query build(List builder) { - return new SpanOrQuery((SpanQuery[]) builder.toArray(new SpanQuery[builder.size()])); - } - - @Override - protected void checkMaxClauseCount(int count) { - if (count > maxExpansions) { - throw new RuntimeException("[" + query.toString() + " ] " + - "exceeds maxClauseCount [ Boolean maxClauseCount is set to " + BooleanQuery.getMaxClauseCount() + "]"); - } - } - - @Override - protected void addClause(List topLevel, Term term, int 
docCount, float boost, TermStates states) { - SpanTermQuery q = new SpanTermQuery(term, states); - topLevel.add(q); - } - }; - return (SpanQuery) delegate.rewrite(reader, query); - } - } - @Override protected Query doToQuery(QueryShardContext context) throws IOException { - Query subQuery = multiTermQueryBuilder.toQuery(context); - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - while (true) { - if (subQuery instanceof ConstantScoreQuery) { - subQuery = ((ConstantScoreQuery) subQuery).getQuery(); - boost = 1; - } else if (subQuery instanceof BoostQuery) { - BoostQuery boostQuery = (BoostQuery) subQuery; - subQuery = boostQuery.getQuery(); - boost *= boostQuery.getBoost(); - } else { - break; - } - } - // no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here - assert subQuery instanceof SpanBoostQuery == false; - - if (subQuery instanceof MatchNoDocsQuery) { - return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), subQuery.toString()); - } - - final SpanQuery spanQuery; - if (subQuery instanceof TermQuery) { - /** - * Text fields that index prefixes can rewrite prefix queries - * into term queries. See {@link TextFieldMapper.TextFieldType#prefixQuery}. - */ - if (multiTermQueryBuilder.getClass() != PrefixQueryBuilder.class) { - throw new UnsupportedOperationException("unsupported inner query generated by " + - multiTermQueryBuilder.getClass().getName() + ", should be " + MultiTermQuery.class.getName() - + " but was " + subQuery.getClass().getName()); - } - + if (multiTermQueryBuilder instanceof PrefixQueryBuilder) { PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder; - MappedFieldType fieldType = context.fieldMapper(prefixBuilder.fieldName()); - String fieldName = fieldType != null ? fieldType.name() : prefixBuilder.fieldName(); - - if (context.getIndexSettings().getIndexVersionCreated().before(Version.V_6_4_0)) { - /** - * Indices created in this version do not index positions on the prefix field - * so we cannot use it to match positional queries. Instead, we explicitly create the prefix - * query on the main field to avoid the rewrite. - */ - PrefixQuery prefixQuery = new PrefixQuery(new Term(fieldName, prefixBuilder.value())); - if (prefixBuilder.rewrite() != null) { - MultiTermQuery.RewriteMethod rewriteMethod = - QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE); - prefixQuery.setRewriteMethod(rewriteMethod); + MappedFieldType fieldType = context.fieldMapper(multiTermQueryBuilder.fieldName()); + if (fieldType == null) { + return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), "unknown field"); + } + final SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod; + if (prefixBuilder.rewrite() != null) { + MultiTermQuery.RewriteMethod rewriteMethod = + QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE); + if (rewriteMethod instanceof TopTermsRewrite) { + TopTermsRewrite innerRewrite = (TopTermsRewrite) rewriteMethod; + spanRewriteMethod = new SpanMultiTermQueryWrapper.TopTermsSpanBooleanQueryRewrite(innerRewrite.getSize()); + } else { + spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(); } - subQuery = prefixQuery; - spanQuery = new SpanMultiTermQueryWrapper<>(prefixQuery); } else { - /** - * Prefixes are indexed in a different field so we mask the term query with the original field - * name. This is required because span_near and span_or queries don't work across different field. 
- * The masking is safe because the prefix field is indexed using the same content than the original field - * and the prefix analyzer preserves positions. - */ - SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); - spanQuery = new FieldMaskingSpanQuery(spanTermQuery, fieldName); + spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(); } + return fieldType.spanPrefixQuery(prefixBuilder.value(), spanRewriteMethod, context); } else { - if (subQuery instanceof MultiTermQuery == false) { + Query subQuery = multiTermQueryBuilder.toQuery(context); + while (true) { + if (subQuery instanceof ConstantScoreQuery) { + subQuery = ((ConstantScoreQuery) subQuery).getQuery(); + } else if (subQuery instanceof BoostQuery) { + BoostQuery boostQuery = (BoostQuery) subQuery; + subQuery = boostQuery.getQuery(); + } else { + break; + } + } + if (subQuery instanceof MatchNoDocsQuery) { + return new SpanMatchNoDocsQuery(multiTermQueryBuilder.fieldName(), subQuery.toString()); + } else if (subQuery instanceof MultiTermQuery == false) { throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() + " but was " + subQuery.getClass().getName()); } - spanQuery = new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery); - } - if (subQuery instanceof MultiTermQuery) { MultiTermQuery multiTermQuery = (MultiTermQuery) subQuery; - SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) spanQuery; + SpanMultiTermQueryWrapper wrapper = new SpanMultiTermQueryWrapper<>(multiTermQuery); if (multiTermQuery.getRewriteMethod() instanceof TopTermsRewrite == false) { - wrapper.setRewriteMethod(new TopTermSpanBooleanQueryRewriteWithMaxClause()); + wrapper.setRewriteMethod(new SpanBooleanQueryRewriteWithMaxClause()); } + return wrapper; } - if (boost != AbstractQueryBuilder.DEFAULT_BOOST) { - return new SpanBoostQuery(spanQuery, boost); - } - - return spanQuery; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java index 267f3a6951161..ad4b267eef643 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -20,43 +20,46 @@ package org.elasticsearch.index.search; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CachingTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; -import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PhraseQuery; -import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.QueryBuilder; +import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery; import static org.elasticsearch.common.lucene.search.Queries.newUnmappedFieldQuery; @@ -128,19 +131,10 @@ public void writeTo(StreamOutput out) throws IOException { } } - /** - * the default phrase slop - */ public static final int DEFAULT_PHRASE_SLOP = 0; - /** - * the default leniency setting - */ public static final boolean DEFAULT_LENIENCY = false; - /** - * the default zero terms query - */ public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE; protected final QueryShardContext context; @@ -159,6 +153,9 @@ public void writeTo(StreamOutput out) throws IOException { protected int maxExpansions = FuzzyQuery.defaultMaxExpansions; + protected SpanMultiTermQueryWrapper.SpanRewriteMethod spanRewriteMethod = + new SpanBooleanQueryRewriteWithMaxClause(FuzzyQuery.defaultMaxExpansions, false); + protected boolean transpositions = FuzzyQuery.defaultTranspositions; protected MultiTermQuery.RewriteMethod fuzzyRewriteMethod; @@ -212,6 +209,7 @@ public void setFuzzyPrefixLength(int fuzzyPrefixLength) { public void setMaxExpansions(int maxExpansions) { this.maxExpansions = maxExpansions; + this.spanRewriteMethod = new SpanBooleanQueryRewriteWithMaxClause(maxExpansions, false); } public void setTranspositions(boolean transpositions) { @@ -234,78 +232,83 @@ public void setAutoGenerateSynonymsPhraseQuery(boolean enabled) { this.autoGenerateSynonymsPhraseQuery = enabled; } - protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) { - if (analyzer == null) { - return quoted ? 
context.getSearchQuoteAnalyzer(fieldType) : context.getSearchAnalyzer(fieldType); - } else { - return analyzer; - } - } - - private boolean hasPositions(MappedFieldType fieldType) { - return fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; - } - public Query parse(Type type, String fieldName, Object value) throws IOException { - MappedFieldType fieldType = context.fieldMapper(fieldName); + final MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType == null) { return newUnmappedFieldQuery(fieldName); } - final String field = fieldType.name(); - - Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE); + Analyzer analyzer = getAnalyzer(fieldType, type == Type.PHRASE || type == Type.PHRASE_PREFIX); assert analyzer != null; + MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType); + /* * If a keyword analyzer is used, we know that further analysis isn't * needed and can immediately return a term query. */ - if (analyzer == Lucene.KEYWORD_ANALYZER) { - return blendTermQuery(new Term(fieldName, value.toString()), fieldType); + if (analyzer == Lucene.KEYWORD_ANALYZER + && type != Type.PHRASE_PREFIX) { + return builder.newTermQuery(new Term(fieldName, value.toString())); } - MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType); - builder.setEnablePositionIncrements(this.enablePositionIncrements); - if (hasPositions(fieldType)) { - builder.setAutoGenerateMultiTermSynonymsPhraseQuery(this.autoGenerateSynonymsPhraseQuery); - } else { - builder.setAutoGenerateMultiTermSynonymsPhraseQuery(false); - } + return parseInternal(type, fieldName, builder, value); + } - Query query = null; + protected final Query parseInternal(Type type, String fieldName, MatchQueryBuilder builder, Object value) throws IOException { + final Query query; switch (type) { case BOOLEAN: if (commonTermsCutoff == null) { - query = builder.createBooleanQuery(field, value.toString(), occur); + query = builder.createBooleanQuery(fieldName, value.toString(), occur); } else { - query = builder.createCommonTermsQuery(field, value.toString(), occur, occur, commonTermsCutoff); + query = createCommonTermsQuery(builder, fieldName, value.toString(), occur, occur, commonTermsCutoff); } break; + case PHRASE: - query = builder.createPhraseQuery(field, value.toString(), phraseSlop); + query = builder.createPhraseQuery(fieldName, value.toString(), phraseSlop); break; + case PHRASE_PREFIX: - query = builder.createPhrasePrefixQuery(field, value.toString(), phraseSlop, maxExpansions); + query = builder.createPhrasePrefixQuery(fieldName, value.toString(), phraseSlop); break; + default: throw new IllegalStateException("No type found for [" + type + "]"); } - if (query == null) { - return zeroTermsQuery(); - } else { - return query; + return query == null ? 
zeroTermsQuery() : query; + } + + private Query createCommonTermsQuery(MatchQueryBuilder builder, String field, String queryText, + Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency) { + Query booleanQuery = builder.createBooleanQuery(field, queryText, lowFreqOccur); + if (booleanQuery != null && booleanQuery instanceof BooleanQuery) { + BooleanQuery bq = (BooleanQuery) booleanQuery; + return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency); } + return booleanQuery; } - protected final Query termQuery(MappedFieldType fieldType, BytesRef value, boolean lenient) { - try { - return fieldType.termQuery(value, context); - } catch (RuntimeException e) { - if (lenient) { - return newLenientFieldQuery(fieldType.name(), e); + private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, + Occur highFreqOccur, + Occur lowFreqOccur, + float maxTermFrequency) { + ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency); + for (BooleanClause clause : bq.clauses()) { + if ((clause.getQuery() instanceof TermQuery) == false) { + return bq; } - throw e; + query.add(((TermQuery) clause.getQuery()).getTerm()); + } + return query; + } + + protected Analyzer getAnalyzer(MappedFieldType fieldType, boolean quoted) { + if (analyzer == null) { + return quoted ? context.getSearchQuoteAnalyzer(fieldType) : context.getSearchAnalyzer(fieldType); + } else { + return analyzer; } } @@ -322,216 +325,345 @@ protected Query zeroTermsQuery() { } } - private class MatchQueryBuilder extends QueryBuilder { + private boolean hasPositions(MappedFieldType fieldType) { + return fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + } - private final MappedFieldType mapper; + class MatchQueryBuilder extends QueryBuilder { + private final MappedFieldType fieldType; /** * Creates a new QueryBuilder using the given analyzer. */ - MatchQueryBuilder(Analyzer analyzer, MappedFieldType mapper) { + MatchQueryBuilder(Analyzer analyzer, MappedFieldType fieldType) { super(analyzer); - this.mapper = mapper; + this.fieldType = fieldType; + if (hasPositions(fieldType)) { + setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery); + } else { + setAutoGenerateMultiTermSynonymsPhraseQuery(false); + } + setEnablePositionIncrements(enablePositionIncrements); } @Override - protected Query newTermQuery(Term term) { - return blendTermQuery(term, mapper); + protected Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field, + String queryText, boolean quoted, int slop) { + assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; + Type type = quoted ? Type.PHRASE : Type.BOOLEAN; + return createQuery(field, queryText, type, operator, slop); } - @Override - protected Query newSynonymQuery(Term[] terms) { - return blendTermsQuery(terms, mapper); + public Query createPhrasePrefixQuery(String field, String queryText, int slop) { + return createQuery(field, queryText, Type.PHRASE_PREFIX, occur, slop); } - @Override - protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { - try { - checkForPositions(field); - Query query = mapper.phraseQuery(field, stream, slop, enablePositionIncrements); - if (query instanceof PhraseQuery) { - // synonyms that expand to multiple terms can return a phrase query. 
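A side note on the common-terms path earlier in this hunk: createCommonTermsQuery only rewrites the analyzed result when every clause of the boolean query is a plain TermQuery; otherwise the original query is returned untouched. That guard in isolation, with an illustrative helper name (not part of this patch):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;

    final class CommonTermsSupport {
        // Mirrors boolToExtendedCommonTermsQuery: collect the terms of a term-only
        // BooleanQuery, or return null when any clause is not a plain TermQuery,
        // in which case the caller keeps the original boolean query as-is.
        static List<Term> extractTermsOrNull(BooleanQuery bq) {
            List<Term> terms = new ArrayList<>();
            for (BooleanClause clause : bq.clauses()) {
                if ((clause.getQuery() instanceof TermQuery) == false) {
                    return null;
                }
                terms.add(((TermQuery) clause.getQuery()).getTerm());
            }
            return terms;
        }
    }
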
- return blendPhraseQuery((PhraseQuery) query, mapper); - } - return query; - } catch (IllegalArgumentException | IllegalStateException e) { - if (lenient) { - return newLenientFieldQuery(field, e); + private Query createFieldQuery(TokenStream source, Type type, BooleanClause.Occur operator, String field, int phraseSlop) { + assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; + + // Build an appropriate query based on the analysis chain. + try (CachingTokenFilter stream = new CachingTokenFilter(source)) { + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class); + PositionLengthAttribute posLenAtt = stream.addAttribute(PositionLengthAttribute.class); + + if (termAtt == null) { + return null; } - throw e; - } - } - @Override - protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { - try { - checkForPositions(field); - return mapper.multiPhraseQuery(field, stream, slop, enablePositionIncrements); - } catch (IllegalArgumentException | IllegalStateException e) { - if (lenient) { - return newLenientFieldQuery(field, e); + // phase 1: read through the stream and assess the situation: + // counting the number of tokens/positions and marking if we have any synonyms. + + int numTokens = 0; + int positionCount = 0; + boolean hasSynonyms = false; + boolean isGraph = false; + + stream.reset(); + while (stream.incrementToken()) { + numTokens++; + int positionIncrement = posIncAtt.getPositionIncrement(); + if (positionIncrement != 0) { + positionCount += positionIncrement; + } else { + hasSynonyms = true; + } + + int positionLength = posLenAtt.getPositionLength(); + if (enableGraphQueries && positionLength > 1) { + isGraph = true; + } } - throw e; - } - } - private void checkForPositions(String field) { - if (hasPositions(mapper) == false) { - throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery"); + // phase 2: based on token count, presence of synonyms, and options + // formulate a single term, boolean, or phrase. 
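The rewritten createFieldQuery takes a counting pass over a cached token stream (phase 1 above) before dispatching between term, boolean, phrase, and graph queries (phase 2 below). A standalone sketch of the counting pass, assuming Lucene's StandardAnalyzer is on the classpath (class and field names here are illustrative):

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.CachingTokenFilter;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
    import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;

    public class TokenStreamStats {
        public static void main(String[] args) throws IOException {
            Analyzer analyzer = new StandardAnalyzer();
            try (CachingTokenFilter stream =
                     new CachingTokenFilter(analyzer.tokenStream("field", "quick brown fox"))) {
                PositionIncrementAttribute posInc = stream.addAttribute(PositionIncrementAttribute.class);
                PositionLengthAttribute posLen = stream.addAttribute(PositionLengthAttribute.class);
                int numTokens = 0;
                int positionCount = 0;
                boolean hasSynonyms = false;
                boolean isGraph = false;
                stream.reset();
                while (stream.incrementToken()) {
                    numTokens++;
                    int inc = posInc.getPositionIncrement();
                    if (inc != 0) {
                        positionCount += inc;   // token starts a new position
                    } else {
                        hasSynonyms = true;     // token stacked on the previous position
                    }
                    if (posLen.getPositionLength() > 1) {
                        isGraph = true;         // multi-position token: graph analysis needed
                    }
                }
                System.out.printf("tokens=%d positions=%d synonyms=%b graph=%b%n",
                        numTokens, positionCount, hasSynonyms, isGraph);
            }
        }
    }
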
+ if (numTokens == 0) { + return null; + } else if (numTokens == 1) { + // single term + if (type == Type.PHRASE_PREFIX) { + return analyzePhrasePrefix(field, stream, phraseSlop, positionCount); + } else { + return analyzeTerm(field, stream); + } + } else if (isGraph) { + // graph + if (type == Type.PHRASE || type == Type.PHRASE_PREFIX) { + return analyzeGraphPhrase(stream, field, type, phraseSlop); + } else { + return analyzeGraphBoolean(field, stream, operator); + } + } else if (type == Type.PHRASE && positionCount > 1) { + // phrase + if (hasSynonyms) { + // complex phrase with synonyms + return analyzeMultiPhrase(field, stream, phraseSlop); + } else { + // simple phrase + return analyzePhrase(field, stream, phraseSlop); + } + } else if (type == Type.PHRASE_PREFIX) { + // phrase prefix + return analyzePhrasePrefix(field, stream, phraseSlop, positionCount); + } else { + // boolean + if (positionCount == 1) { + // only one position, with synonyms + return analyzeBoolean(field, stream); + } else { + // complex case: multiple positions + return analyzeMultiBoolean(field, stream, operator); + } + } + } catch (IOException e) { + throw new RuntimeException("Error analyzing query text", e); } } - /** - * Checks if graph analysis should be enabled for the field depending - * on the provided {@link Analyzer} - */ - @Override - protected Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field, - String queryText, boolean quoted, int phraseSlop) { - assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; - + private Query createQuery(String field, String queryText, Type type, BooleanClause.Occur operator, int phraseSlop) { // Use the analyzer to get all the tokens, and then build an appropriate // query based on the analysis chain. try (TokenStream source = analyzer.tokenStream(field, queryText)) { if (source.hasAttribute(DisableGraphAttribute.class)) { /* - A {@link TokenFilter} in this {@link TokenStream} disabled the graph analysis to avoid - paths explosion. See {@link org.elasticsearch.index.analysis.ShingleTokenFilterFactory} for details. + * A {@link TokenFilter} in this {@link TokenStream} disabled the graph analysis to avoid + * paths explosion. See {@link org.elasticsearch.index.analysis.ShingleTokenFilterFactory} for details. */ setEnableGraphQueries(false); } - Query query = super.createFieldQuery(source, operator, field, quoted, phraseSlop); - setEnableGraphQueries(true); - return query; + try { + return createFieldQuery(source, type, operator, field, phraseSlop); + } finally { + setEnableGraphQueries(true); + } } catch (IOException e) { throw new RuntimeException("Error analyzing query text", e); } } - public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) { - final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop); - return toMultiPhrasePrefix(query, phraseSlop, maxExpansions); + private SpanQuery newSpanQuery(Term[] terms, boolean prefix) { + if (terms.length == 1) { + return prefix ? fieldType.spanPrefixQuery(terms[0].text(), spanRewriteMethod, context) : new SpanTermQuery(terms[0]); + } + SpanQuery[] spanQueries = new SpanQuery[terms.length]; + for (int i = 0; i < terms.length; i++) { + spanQueries[i] = prefix ? 
new SpanTermQuery(terms[i]) : + fieldType.spanPrefixQuery(terms[i].text(), spanRewriteMethod, context); + } + return new SpanOrQuery(spanQueries); } - private Query toMultiPhrasePrefix(final Query query, int phraseSlop, int maxExpansions) { - float boost = 1; - Query innerQuery = query; - while (innerQuery instanceof BoostQuery) { - BoostQuery bq = (BoostQuery) innerQuery; - boost *= bq.getBoost(); - innerQuery = bq.getQuery(); - } - if (query instanceof SpanQuery) { - return toSpanQueryPrefix((SpanQuery) query, boost); + @Override + protected SpanQuery createSpanQuery(TokenStream in, String field) throws IOException { + return createSpanQuery(in, field, false); + } + + private SpanQuery createSpanQuery(TokenStream in, String field, boolean prefix) throws IOException { + TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncAtt = in.getAttribute(PositionIncrementAttribute.class); + if (termAtt == null) { + return null; } - final MultiPhrasePrefixQuery prefixQuery = new MultiPhrasePrefixQuery(); - prefixQuery.setMaxExpansions(maxExpansions); - prefixQuery.setSlop(phraseSlop); - if (innerQuery instanceof PhraseQuery) { - PhraseQuery pq = (PhraseQuery) innerQuery; - Term[] terms = pq.getTerms(); - int[] positions = pq.getPositions(); - for (int i = 0; i < terms.length; i++) { - prefixQuery.add(new Term[]{terms[i]}, positions[i]); + + SpanNearQuery.Builder builder = new SpanNearQuery.Builder(field, true); + Term lastTerm = null; + while (in.incrementToken()) { + if (posIncAtt.getPositionIncrement() > 1) { + builder.addGap(posIncAtt.getPositionIncrement()-1); } - return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); - } else if (innerQuery instanceof MultiPhraseQuery) { - MultiPhraseQuery pq = (MultiPhraseQuery) innerQuery; - Term[][] terms = pq.getTermArrays(); - int[] positions = pq.getPositions(); - for (int i = 0; i < terms.length; i++) { - prefixQuery.add(terms[i], positions[i]); + if (lastTerm != null) { + builder.addClause(new SpanTermQuery(lastTerm)); } - return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); - } else if (innerQuery instanceof TermQuery) { - prefixQuery.add(((TermQuery) innerQuery).getTerm()); - return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); + lastTerm = new Term(field, termAtt.getBytesRef()); + } + if (lastTerm != null) { + SpanQuery spanQuery = prefix ? + fieldType.spanPrefixQuery(lastTerm.text(), spanRewriteMethod, context) : new SpanTermQuery(lastTerm); + builder.addClause(spanQuery); + } + SpanNearQuery query = builder.build(); + SpanQuery[] clauses = query.getClauses(); + if (clauses.length == 1) { + return clauses[0]; + } else { + return query; } - return query; } - private Query toSpanQueryPrefix(SpanQuery query, float boost) { - if (query instanceof SpanTermQuery) { - SpanMultiTermQueryWrapper ret = - new SpanMultiTermQueryWrapper<>(new PrefixQuery(((SpanTermQuery) query).getTerm())); - return boost == 1 ? ret : new BoostQuery(ret, boost); - } else if (query instanceof SpanNearQuery) { - SpanNearQuery spanNearQuery = (SpanNearQuery) query; - SpanQuery[] clauses = spanNearQuery.getClauses(); - if (clauses[clauses.length - 1] instanceof SpanTermQuery) { - clauses[clauses.length - 1] = new SpanMultiTermQueryWrapper<>( - new PrefixQuery(((SpanTermQuery) clauses[clauses.length - 1]).getTerm()) - ); - } - SpanNearQuery newQuery = new SpanNearQuery(clauses, spanNearQuery.getSlop(), spanNearQuery.isInOrder()); - return boost == 1 ? 
newQuery : new BoostQuery(newQuery, boost); - } else if (query instanceof SpanOrQuery) { - SpanOrQuery orQuery = (SpanOrQuery) query; - SpanQuery[] clauses = new SpanQuery[orQuery.getClauses().length]; - for (int i = 0; i < clauses.length; i++) { - clauses[i] = (SpanQuery) toSpanQueryPrefix(orQuery.getClauses()[i], 1); - } - return boost == 1 ? new SpanOrQuery(clauses) : new BoostQuery(new SpanOrQuery(clauses), boost); + @Override + protected Query newTermQuery(Term term) { + Supplier querySupplier; + if (fuzziness != null) { + querySupplier = () -> { + Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); + if (query instanceof FuzzyQuery) { + QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); + } + return query; + }; } else { + querySupplier = () -> fieldType.termQuery(term.bytes(), context); + } + try { + Query query = querySupplier.get(); return query; + } catch (RuntimeException e) { + if (lenient) { + return newLenientFieldQuery(fieldType.name(), e); + } else { + throw e; + } } } - public Query createCommonTermsQuery(String field, String queryText, - Occur highFreqOccur, - Occur lowFreqOccur, - float maxTermFrequency) { - Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur); - if (booleanQuery != null && booleanQuery instanceof BooleanQuery) { - BooleanQuery bq = (BooleanQuery) booleanQuery; - return boolToExtendedCommonTermsQuery(bq, highFreqOccur, lowFreqOccur, maxTermFrequency); + @Override + protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { + try { + checkForPositions(field); + return fieldType.phraseQuery(stream, slop, enablePositionIncrements); + } catch (IllegalArgumentException | IllegalStateException e) { + if (lenient) { + return newLenientFieldQuery(field, e); + } + throw e; } - return booleanQuery; } - private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, - Occur highFreqOccur, - Occur lowFreqOccur, - float maxTermFrequency) { - ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency); - for (BooleanClause clause : bq.clauses()) { - if (!(clause.getQuery() instanceof TermQuery)) { - return bq; + @Override + protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { + try { + checkForPositions(field); + return fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements); + } catch (IllegalArgumentException | IllegalStateException e) { + if (lenient) { + return newLenientFieldQuery(field, e); } - query.add(((TermQuery) clause.getQuery()).getTerm()); + throw e; } - return query; } - } - - /** - * Called when a phrase query is built with {@link QueryBuilder#analyzePhrase(String, TokenStream, int)}. - * Subclass can override this function to blend this query to multiple fields. 
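Note how newTermQuery above hides query construction behind a Supplier so the fuzzy and plain term paths share one lenient error handler. The pattern in isolation (method name is illustrative; newLenientFieldQuery is the existing Queries helper already imported by this class):

    import java.util.function.Supplier;
    import org.apache.lucene.search.Query;

    import static org.elasticsearch.common.lucene.search.Queries.newLenientFieldQuery;

    final class LenientQueries {
        // Build the query lazily; with lenient=true a mapping or parsing failure
        // degrades into a lenient placeholder query instead of failing the request.
        static Query buildLeniently(String fieldName, boolean lenient, Supplier<Query> querySupplier) {
            try {
                return querySupplier.get();
            } catch (RuntimeException e) {
                if (lenient) {
                    return newLenientFieldQuery(fieldName, e);
                }
                throw e;
            }
        }
    }
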
- */ - protected Query blendPhraseQuery(PhraseQuery query, MappedFieldType fieldType) { - return query; - } - - protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) { - return new SynonymQuery(terms); - } - protected Query blendTermQuery(Term term, MappedFieldType fieldType) { - if (fuzziness != null) { + private Query analyzePhrasePrefix(String field, TokenStream stream, int slop, int positionCount) throws IOException { try { - Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); - if (query instanceof FuzzyQuery) { - QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); + if (positionCount > 1) { + checkForPositions(field); } - return query; - } catch (RuntimeException e) { + return fieldType.phrasePrefixQuery(stream, slop, maxExpansions); + } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { - return newLenientFieldQuery(fieldType.name(), e); + return newLenientFieldQuery(field, e); + } + throw e; + } + } + + private Query analyzeGraphPhrase(TokenStream source, String field, Type type, int slop) throws IOException { + assert type == Type.PHRASE_PREFIX || type == Type.PHRASE; + + source.reset(); + GraphTokenStreamFiniteStrings graph = new GraphTokenStreamFiniteStrings(source); + if (phraseSlop > 0) { + /* + * Creates a boolean query from the graph token stream by extracting all the finite strings from the graph + * and using them to create phrase queries with the appropriate slop. + */ + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + Iterator it = graph.getFiniteStrings(); + while (it.hasNext()) { + Query query = createFieldQuery(it.next(), type, BooleanClause.Occur.MUST, field, slop); + if (query != null) { + builder.add(query, BooleanClause.Occur.SHOULD); + } + } + return builder.build(); + } + + /* + * Creates a span near (phrase) query from a graph token stream. + * The articulation points of the graph are visited in order and the queries + * created at each point are merged in the returned near query. 
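analyzeGraphPhrase distinguishes two cases: with a non-zero slop it enumerates every finite path through the token graph and ORs one phrase query per path; with zero slop it walks the articulation points, as in the remainder of the method below. A small sketch of the path enumeration, under the assumption that the stream really is a graph (e.g. multi-word synonyms):

    import java.io.IOException;
    import java.util.Iterator;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings;

    final class GraphPaths {
        // Count the finite strings (distinct paths) of a token graph; the patch
        // builds one phrase query per such path when phraseSlop > 0.
        static int countPaths(TokenStream source) throws IOException {
            source.reset();
            GraphTokenStreamFiniteStrings graph = new GraphTokenStreamFiniteStrings(source);
            int paths = 0;
            Iterator<TokenStream> it = graph.getFiniteStrings();
            while (it.hasNext()) {
                it.next();
                paths++;
            }
            return paths;
        }
    }
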
+ */ + List clauses = new ArrayList<>(); + int[] articulationPoints = graph.articulationPoints(); + int lastState = 0; + int maxClauseCount = BooleanQuery.getMaxClauseCount(); + for (int i = 0; i <= articulationPoints.length; i++) { + int start = lastState; + int end = -1; + if (i < articulationPoints.length) { + end = articulationPoints[i]; + } + lastState = end; + final SpanQuery queryPos; + boolean endPrefix = end == -1 && type == Type.PHRASE_PREFIX; + if (graph.hasSidePath(start)) { + List queries = new ArrayList<>(); + Iterator it = graph.getFiniteStrings(start, end); + while (it.hasNext()) { + TokenStream ts = it.next(); + SpanQuery q = createSpanQuery(ts, field, endPrefix); + if (q != null) { + if (queries.size() >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + queries.add(q); + } + } + if (queries.size() > 0) { + queryPos = new SpanOrQuery(queries.toArray(new SpanQuery[0])); + } else { + queryPos = null; + } } else { - throw e; + Term[] terms = graph.getTerms(field, start); + assert terms.length > 0; + if (terms.length >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + queryPos = newSpanQuery(terms, endPrefix); + } + + if (queryPos != null) { + if (clauses.size() >= maxClauseCount) { + throw new BooleanQuery.TooManyClauses(); + } + clauses.add(queryPos); } } + + if (clauses.isEmpty()) { + return null; + } else if (clauses.size() == 1) { + return clauses.get(0); + } else { + return new SpanNearQuery(clauses.toArray(new SpanQuery[0]), 0, true); + } + } + + private void checkForPositions(String field) { + if (hasPositions(fieldType) == false) { + throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery"); + } } - return termQuery(fieldType, term.bytes(), lenient); } } diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 6f57faba001c9..7eefaadaadde2 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -20,12 +20,12 @@ package org.elasticsearch.index.search; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; @@ -49,222 +49,182 @@ public class MultiMatchQuery extends MatchQuery { private Float groupTieBreaker = null; - public void setTieBreaker(float tieBreaker) { - this.groupTieBreaker = tieBreaker; - } - public MultiMatchQuery(QueryShardContext context) { super(context); } - private Query parseAndApply(Type type, String fieldName, Object value, - String minimumShouldMatch, Float boostValue) throws IOException { - Query query = parse(type, fieldName, value); - query = Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch); - if (query != null && boostValue != null && - boostValue != AbstractQueryBuilder.DEFAULT_BOOST && query instanceof MatchNoDocsQuery == false) { - query = new BoostQuery(query, boostValue); - } - return query; + public void setTieBreaker(float tieBreaker) { + this.groupTieBreaker = tieBreaker; } public 
Query parse(MultiMatchQueryBuilder.Type type, Map fieldNames, - Object value, String minimumShouldMatch) throws IOException { - final Query result; - // reset query builder - queryBuilder = null; - if (fieldNames.size() == 1) { - Map.Entry fieldBoost = fieldNames.entrySet().iterator().next(); - Float boostValue = fieldBoost.getValue(); - result = parseAndApply(type.matchQueryType(), fieldBoost.getKey(), value, minimumShouldMatch, boostValue); - } else { - final float tieBreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker; - switch (type) { - case PHRASE: - case PHRASE_PREFIX: - case BEST_FIELDS: - case MOST_FIELDS: - queryBuilder = new QueryBuilder(tieBreaker); - break; - case CROSS_FIELDS: - queryBuilder = new CrossFieldsQueryBuilder(tieBreaker); - break; - default: - throw new IllegalStateException("No such type: " + type); - } - final List queries = queryBuilder.buildGroupedQueries(type, fieldNames, value, minimumShouldMatch); - result = queryBuilder.combineGrouped(queries); + Object value, String minimumShouldMatch) throws IOException { + final float tieBreaker = groupTieBreaker == null ? type.tieBreaker() : groupTieBreaker; + final List queries; + switch (type) { + case PHRASE: + case PHRASE_PREFIX: + case BEST_FIELDS: + case MOST_FIELDS: + queries = buildFieldQueries(type, fieldNames, value, minimumShouldMatch); + break; + + case CROSS_FIELDS: + queries = buildCrossFieldQuery(type, fieldNames, value, minimumShouldMatch, tieBreaker); + break; + + default: + throw new IllegalStateException("No such type: " + type); } - return result; + return combineGrouped(queries, tieBreaker); } - private QueryBuilder queryBuilder; - - public class QueryBuilder { - protected final float tieBreaker; - - public QueryBuilder(float tieBreaker) { - this.tieBreaker = tieBreaker; - } - - public List buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, - Object value, String minimumShouldMatch) throws IOException{ - List queries = new ArrayList<>(); - for (String fieldName : fieldNames.keySet()) { - Float boostValue = fieldNames.get(fieldName); - Query query = parseGroup(type.matchQueryType(), fieldName, boostValue, value, minimumShouldMatch); - if (query != null) { - queries.add(query); - } - } - return queries; + private Query combineGrouped(List groupQuery, float tieBreaker) { + if (groupQuery.isEmpty()) { + return zeroTermsQuery(); } - - Query parseGroup(Type type, String field, Float boostValue, Object value, String minimumShouldMatch) throws IOException { - if (context.fieldMapper(field) == null) { - return null; // indicates to the caller that this field is unmapped and should be disregarded - } - return parseAndApply(type, field, value, minimumShouldMatch, boostValue); + if (groupQuery.size() == 1) { + return groupQuery.get(0); } + return new DisjunctionMaxQuery(groupQuery, tieBreaker); + } - private Query combineGrouped(List groupQuery) { - if (groupQuery == null || groupQuery.isEmpty()) { - return zeroTermsQuery(); + private List buildFieldQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, + Object value, String minimumShouldMatch) throws IOException{ + List queries = new ArrayList<>(); + for (String fieldName : fieldNames.keySet()) { + if (context.fieldMapper(fieldName) == null) { + // ignore unmapped fields + continue; } - if (groupQuery.size() == 1) { - return groupQuery.get(0); + Float boostValue = fieldNames.get(fieldName); + Query query = parse(type.matchQueryType(), fieldName, value); + query = Queries.maybeApplyMinimumShouldMatch(query, 
minimumShouldMatch); + if (query != null + && boostValue != null + && boostValue != AbstractQueryBuilder.DEFAULT_BOOST + && query instanceof MatchNoDocsQuery == false) { + query = new BoostQuery(query, boostValue); } - List queries = new ArrayList<>(); - for (Query query : groupQuery) { + if (query != null) { queries.add(query); } - return new DisjunctionMaxQuery(queries, tieBreaker); - } - - public Query blendTerm(Term term, MappedFieldType fieldType) { - return MultiMatchQuery.super.blendTermQuery(term, fieldType); - } - - public Query blendTerms(Term[] terms, MappedFieldType fieldType) { - return MultiMatchQuery.super.blendTermsQuery(terms, fieldType); - } - - public Query termQuery(MappedFieldType fieldType, BytesRef value) { - return MultiMatchQuery.this.termQuery(fieldType, value, lenient); - } - - public Query blendPhrase(PhraseQuery query, MappedFieldType type) { - return MultiMatchQuery.super.blendPhraseQuery(query, type); } + return queries; } - final class CrossFieldsQueryBuilder extends QueryBuilder { - private FieldAndFieldType[] blendedFields; - - CrossFieldsQueryBuilder(float tiebreaker) { - super(tiebreaker); - } - - @Override - public List buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, - Object value, String minimumShouldMatch) throws IOException { - Map> groups = new HashMap<>(); - List queries = new ArrayList<>(); - for (Map.Entry entry : fieldNames.entrySet()) { - String name = entry.getKey(); - MappedFieldType fieldType = context.fieldMapper(name); - if (fieldType != null) { - Analyzer actualAnalyzer = getAnalyzer(fieldType, type == MultiMatchQueryBuilder.Type.PHRASE); - name = fieldType.name(); - if (!groups.containsKey(actualAnalyzer)) { - groups.put(actualAnalyzer, new ArrayList<>()); - } - Float boost = entry.getValue(); - boost = boost == null ? Float.valueOf(1.0f) : boost; - groups.get(actualAnalyzer).add(new FieldAndFieldType(fieldType, boost)); - } else { - queries.add(new MatchNoDocsQuery("unknown field " + name)); + private List buildCrossFieldQuery(MultiMatchQueryBuilder.Type type, Map fieldNames, + Object value, String minimumShouldMatch, float tieBreaker) throws IOException { + Map> groups = new HashMap<>(); + List queries = new ArrayList<>(); + for (Map.Entry entry : fieldNames.entrySet()) { + String name = entry.getKey(); + MappedFieldType fieldType = context.fieldMapper(name); + if (fieldType != null) { + Analyzer actualAnalyzer = getAnalyzer(fieldType, type == MultiMatchQueryBuilder.Type.PHRASE); + if (!groups.containsKey(actualAnalyzer)) { + groups.put(actualAnalyzer, new ArrayList<>()); } + float boost = entry.getValue() == null ? 1.0f : entry.getValue(); + groups.get(actualAnalyzer).add(new FieldAndBoost(fieldType, boost)); + } + } + for (Map.Entry> group : groups.entrySet()) { + final MatchQueryBuilder builder; + if (group.getValue().size() == 1) { + builder = new MatchQueryBuilder(group.getKey(), group.getValue().get(0).fieldType); + } else { + builder = new BlendedQueryBuilder(group.getKey(), group.getValue(), tieBreaker); } - for (List group : groups.values()) { - if (group.size() > 1) { - blendedFields = new FieldAndFieldType[group.size()]; - int i = 0; - for (FieldAndFieldType fieldAndFieldType : group) { - blendedFields[i++] = fieldAndFieldType; + + /* + * We have to pick some field to pass through the superclass so + * we just pick the first field. It shouldn't matter because + * fields are already grouped by their analyzers/types. 
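buildCrossFieldQuery first groups the requested fields by their search analyzer, because only fields that analyze text identically can share one parsed query. A reduced sketch of that grouping step (the pair class mirrors FieldAndBoost below, but keys on a String where the real code keys on the Analyzer instance):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class FieldAndBoostSketch {
        final String name;
        final String analyzer; // simplification: the production code uses the Analyzer object
        final float boost;

        FieldAndBoostSketch(String name, String analyzer, float boost) {
            this.name = name;
            this.analyzer = analyzer;
            this.boost = boost;
        }

        // One group per analyzer; each group later gets its own MatchQueryBuilder
        // (single field) or BlendedQueryBuilder (several fields).
        static Map<String, List<FieldAndBoostSketch>> groupByAnalyzer(List<FieldAndBoostSketch> fields) {
            Map<String, List<FieldAndBoostSketch>> groups = new HashMap<>();
            for (FieldAndBoostSketch f : fields) {
                groups.computeIfAbsent(f.analyzer, k -> new ArrayList<>()).add(f);
            }
            return groups;
        }
    }
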
+ */ + String representativeField = group.getValue().get(0).fieldType.name(); + Query query = parseInternal(type.matchQueryType(), representativeField, builder, value); + query = Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch); + if (query != null) { + if (group.getValue().size() == 1) { + // apply the field boost to groups that contain a single field + float boost = group.getValue().get(0).boost; + if (boost != AbstractQueryBuilder.DEFAULT_BOOST) { + query = new BoostQuery(query, boost); } - } else { - blendedFields = null; - } - /* - * We have to pick some field to pass through the superclass so - * we just pick the first field. It shouldn't matter because - * fields are already grouped by their analyzers/types. - */ - String representativeField = group.get(0).fieldType.name(); - Query q = parseGroup(type.matchQueryType(), representativeField, 1f, value, minimumShouldMatch); - if (q != null) { - queries.add(q); } + queries.add(query); } + } + + return queries; + } + + private class BlendedQueryBuilder extends MatchQueryBuilder { + private final List blendedFields; + private final float tieBreaker; - return queries.isEmpty() ? null : queries; + BlendedQueryBuilder(Analyzer analyzer, List blendedFields, float tieBreaker) { + super(analyzer, blendedFields.get(0).fieldType); + this.blendedFields = blendedFields; + this.tieBreaker = tieBreaker; } @Override - public Query blendTerms(Term[] terms, MappedFieldType fieldType) { - if (blendedFields == null || blendedFields.length == 1) { - return super.blendTerms(terms, fieldType); - } + protected Query newSynonymQuery(Term[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { values[i] = terms[i].bytes(); } - return MultiMatchQuery.blendTerms(context, values, commonTermsCutoff, tieBreaker, lenient, blendedFields); + return blendTerms(context, values, commonTermsCutoff, tieBreaker, lenient, blendedFields); } @Override - public Query blendTerm(Term term, MappedFieldType fieldType) { - if (blendedFields == null) { - return super.blendTerm(term, fieldType); - } - return MultiMatchQuery.blendTerm(context, term.bytes(), commonTermsCutoff, tieBreaker, lenient, blendedFields); + public Query newTermQuery(Term term) { + return blendTerm(context, term.bytes(), commonTermsCutoff, tieBreaker, lenient, blendedFields); } @Override - public Query termQuery(MappedFieldType fieldType, BytesRef value) { - /* - * Use the string value of the term because we're reusing the - * portion of the query is usually after the analyzer has run on - * each term. We just skip that analyzer phase. 
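Within a group, BlendedQueryBuilder fans each analyzed phrase out across every field and combines the per-field queries with a dis-max, exactly as its analyzePhrase override below does via MappedFieldType.phraseQuery. The resulting query shape, with made-up field names and terms:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.lucene.search.BoostQuery;
    import org.apache.lucene.search.DisjunctionMaxQuery;
    import org.apache.lucene.search.PhraseQuery;
    import org.apache.lucene.search.Query;

    final class PhraseBlending {
        // One phrase query per blended field, boosted per field, combined with a
        // dis-max so the best-matching field dominates the score.
        static Query blendPhraseAcrossFields(float tieBreaker) {
            String[] fields = { "title", "body" };
            float[] boosts = { 2f, 1f };
            List<Query> disjuncts = new ArrayList<>();
            for (int i = 0; i < fields.length; i++) {
                Query phrase = new PhraseQuery(fields[i], "quick", "fox");
                disjuncts.add(boosts[i] != 1f ? new BoostQuery(phrase, boosts[i]) : phrase);
            }
            return new DisjunctionMaxQuery(disjuncts, tieBreaker);
        }
    }
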
- */ - return blendTerm(new Term(fieldType.name(), value.utf8ToString()), fieldType); + protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { + List disjunctions = new ArrayList<>(); + for (FieldAndBoost fieldType : blendedFields) { + Query query = fieldType.fieldType.phraseQuery(stream, slop, enablePositionIncrements); + if (fieldType.boost != 1f) { + query = new BoostQuery(query, fieldType.boost); + } + disjunctions.add(query); + } + return new DisjunctionMaxQuery(disjunctions, tieBreaker); } @Override - public Query blendPhrase(PhraseQuery query, MappedFieldType type) { - if (blendedFields == null) { - return super.blendPhrase(query, type); + protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { + List disjunctions = new ArrayList<>(); + for (FieldAndBoost fieldType : blendedFields) { + Query query = fieldType.fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements); + if (fieldType.boost != 1f) { + query = new BoostQuery(query, fieldType.boost); + } + disjunctions.add(query); } - /** - * We build phrase queries for multi-word synonyms when {@link QueryBuilder#autoGenerateSynonymsPhraseQuery} is true. - */ - return MultiMatchQuery.blendPhrase(query, tieBreaker, blendedFields); + return new DisjunctionMaxQuery(disjunctions, tieBreaker); } } static Query blendTerm(QueryShardContext context, BytesRef value, Float commonTermsCutoff, float tieBreaker, - boolean lenient, FieldAndFieldType... blendedFields) { + boolean lenient, List blendedFields) { + return blendTerms(context, new BytesRef[] {value}, commonTermsCutoff, tieBreaker, lenient, blendedFields); } static Query blendTerms(QueryShardContext context, BytesRef[] values, Float commonTermsCutoff, float tieBreaker, - boolean lenient, FieldAndFieldType... blendedFields) { + boolean lenient, List blendedFields) { + List queries = new ArrayList<>(); - Term[] terms = new Term[blendedFields.length * values.length]; - float[] blendedBoost = new float[blendedFields.length * values.length]; + Term[] terms = new Term[blendedFields.size() * values.length]; + float[] blendedBoost = new float[blendedFields.size() * values.length]; int i = 0; - for (FieldAndFieldType ft : blendedFields) { + for (FieldAndBoost ft : blendedFields) { for (BytesRef term : values) { Query query; try { @@ -309,61 +269,15 @@ static Query blendTerms(QueryShardContext context, BytesRef[] values, Float comm // best effort: add clauses that are not term queries so that they have an opportunity to match // however their score contribution will be different // TODO: can we improve this? - return new DisjunctionMaxQuery(queries, 1.0f); - } - } - - /** - * Expand a {@link PhraseQuery} to multiple fields that share the same analyzer. - * Returns a {@link DisjunctionMaxQuery} with a disjunction for each expanded field. - */ - static Query blendPhrase(PhraseQuery query, float tiebreaker, FieldAndFieldType... 
fields) { - List disjunctions = new ArrayList<>(); - for (FieldAndFieldType field : fields) { - int[] positions = query.getPositions(); - Term[] terms = query.getTerms(); - PhraseQuery.Builder builder = new PhraseQuery.Builder(); - for (int i = 0; i < terms.length; i++) { - builder.add(new Term(field.fieldType.name(), terms[i].bytes()), positions[i]); - } - Query q = builder.build(); - if (field.boost != AbstractQueryBuilder.DEFAULT_BOOST) { - q = new BoostQuery(q, field.boost); - } - disjunctions.add(q); - } - return new DisjunctionMaxQuery(disjunctions, tiebreaker); - } - - @Override - protected Query blendTermQuery(Term term, MappedFieldType fieldType) { - if (queryBuilder == null) { - return super.blendTermQuery(term, fieldType); - } - return queryBuilder.blendTerm(term, fieldType); - } - - @Override - protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) { - if (queryBuilder == null) { - return super.blendTermsQuery(terms, fieldType); - } - return queryBuilder.blendTerms(terms, fieldType); - } - - @Override - protected Query blendPhraseQuery(PhraseQuery query, MappedFieldType fieldType) { - if (queryBuilder == null) { - return super.blendPhraseQuery(query, fieldType); + return new DisjunctionMaxQuery(queries, tieBreaker); } - return queryBuilder.blendPhrase(query, fieldType); } - static final class FieldAndFieldType { + static final class FieldAndBoost { final MappedFieldType fieldType; final float boost; - FieldAndFieldType(MappedFieldType fieldType, float boost) { + FieldAndBoost(MappedFieldType fieldType, float boost) { this.fieldType = Objects.requireNonNull(fieldType); this.boost = boost; } diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index a6e676006fdbf..4e4b04d1ff19c 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -126,7 +126,7 @@ public void testMultiPhrasePrefixQuerySingleTerm() throws Exception { final String[] outputs = { "The quick brown fox." }; - MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("text"); query.add(new Term("text", "bro")); assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT, BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs); @@ -139,7 +139,7 @@ public void testMultiPhrasePrefixQuery() throws Exception { final String[] outputs = { "The quick brown fox." 
}; - MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("text"); query.add(new Term("text", "quick")); query.add(new Term("text", "brown")); query.add(new Term("text", "fo")); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java index 23b6939fe7a70..f0d4c88e01c19 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java @@ -43,24 +43,24 @@ public void testSimple() throws Exception { IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery("field"); query.add(new Term("field", "aa")); assertThat(searcher.count(query), equalTo(1)); - query = new MultiPhrasePrefixQuery(); + query = new MultiPhrasePrefixQuery("field"); query.add(new Term("field", "aaa")); query.add(new Term("field", "bb")); assertThat(searcher.count(query), equalTo(1)); - query = new MultiPhrasePrefixQuery(); + query = new MultiPhrasePrefixQuery("field"); query.setSlop(1); query.add(new Term("field", "aaa")); query.add(new Term("field", "cc")); assertThat(searcher.count(query), equalTo(1)); - query = new MultiPhrasePrefixQuery(); + query = new MultiPhrasePrefixQuery("field"); query.setSlop(1); query.add(new Term("field", "xxx")); assertThat(searcher.count(query), equalTo(0)); } -} \ No newline at end of file +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index acd6c9ee6f80b..e527f98f73c20 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -34,13 +34,19 @@ import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -52,6 +58,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; +import org.elasticsearch.index.query.MatchPhrasePrefixQueryBuilder; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.search.MatchQuery; @@ -956,4 +963,125 @@ public void 
testIndexPrefixMapping() throws IOException { assertThat(e.getMessage(), containsString("Cannot set index_prefixes on unindexed field [field]")); } } + + public void testFastPhrasePrefixes() throws IOException { + QueryShardContext queryShardContext = indexService.newQueryShardContext( + randomInt(20), null, () -> { + throw new UnsupportedOperationException(); + }, null); + + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "text") + .field("analyzer", "my_stop_analyzer") + .startObject("index_prefixes") + .field("min_chars", 2) + .field("max_chars", 10) + .endObject() + .endObject() + .startObject("synfield") + .field("type", "text") + .field("analyzer", "standard") // will be replaced with MockSynonymAnalyzer + .field("index_phrases", true) + .startObject("index_prefixes") + .field("min_chars", 2) + .field("max_chars", 10) + .endObject() + .endObject() + .endObject() + .endObject().endObject()); + + queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").toQuery(queryShardContext); + Query expected = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "two"))) + .addClause(new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("field._index_prefix", "words")), "field") + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "three words here").toQuery(queryShardContext); + Query expected = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "three"))) + .addClause(new SpanTermQuery(new Term("field", "words"))) + .addClause(new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("field._index_prefix", "here")), "field") + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext); + MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery("field"); + mpq.setSlop(1); + mpq.add(new Term("field", "two")); + mpq.add(new Term("field", "words")); + assertThat(q, equalTo(mpq)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "singleton").toQuery(queryShardContext); + assertThat(q, is(new SynonymQuery(new Term("field._index_prefix", "singleton")))); + } + + { + + Query q = new MatchPhrasePrefixQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext); + Query expected = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "sparkle"))) + .addGap(1) + .addClause(new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("field._index_prefix", "stopword")), "field") + ) + .build(); + assertThat(q, equalTo(expected)); + } + + { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "motor dogs"); + Query expected = new SpanNearQuery.Builder("synfield", true) + .addClause(new SpanTermQuery(new Term("synfield", "motor"))) + .addClause( + new SpanOrQuery( + new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("synfield._index_prefix", "dogs")), "synfield" + ), + new FieldMaskingSpanQuery( + new SpanTermQuery(new Term("synfield._index_prefix", "dog")), "synfield" + ) + ) + ) + .build(); + assertThat(q, equalTo(expected)); + } + 
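The expected queries in testFastPhrasePrefixes all follow one shape: ordinary terms become span-term clauses on the field itself, while the trailing prefix is resolved against the ._index_prefix subfield and masked back onto the parent field so positions still line up. In isolation:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
    import org.apache.lucene.search.spans.SpanNearQuery;
    import org.apache.lucene.search.spans.SpanTermQuery;

    final class PhrasePrefixShape {
        // Shape of the fast phrase-prefix query for "two words" on a prefixed
        // field: an in-order span-near whose last clause matches against the
        // prefix subfield but reports the parent field for position matching.
        static SpanNearQuery fastPhrasePrefix() {
            return new SpanNearQuery.Builder("field", true)
                    .addClause(new SpanTermQuery(new Term("field", "two")))
                    .addClause(new FieldMaskingSpanQuery(
                            new SpanTermQuery(new Term("field._index_prefix", "words")), "field"))
                    .build();
        }
    }
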
+ { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setPhraseSlop(1); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "two dogs"); + MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery("synfield"); + mpq.setSlop(1); + mpq.add(new Term("synfield", "two")); + mpq.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + assertThat(q, equalTo(mpq)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "motor d").toQuery(queryShardContext); + MultiPhrasePrefixQuery mpq = new MultiPhrasePrefixQuery("field"); + mpq.add(new Term("field", "motor")); + mpq.add(new Term("field", "d")); + assertThat(q, equalTo(mpq)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java index fd722ef0c77af..a6aa53e3aa0e9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchPhrasePrefixQueryBuilderTests.java @@ -19,12 +19,9 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.SynonymQuery; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.search.internal.SearchContext; @@ -34,7 +31,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.test.AbstractBuilderTestCase.STRING_ALIAS_FIELD_NAME; import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsString; @@ -43,8 +39,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase { @Override protected MatchPhrasePrefixQueryBuilder doCreateTestQueryBuilder() { - String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, - DOUBLE_FIELD_NAME, DATE_FIELD_NAME); + String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME); Object value; if (isTextField(fieldName)) { int terms = randomIntBetween(0, 3); @@ -91,10 +86,9 @@ protected Map getAlternateVersions() { protected void doAssertLuceneQuery(MatchPhrasePrefixQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { assertThat(query, notNullValue()); - assertThat(query, - either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class)) - .or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)) - .or(instanceOf(IndexOrDocValuesQuery.class)).or(instanceOf(MatchNoDocsQuery.class))); + assertThat(query, either(instanceOf(MultiPhrasePrefixQuery.class)) + .or(instanceOf(SynonymQuery.class)) + .or(instanceOf(MatchNoDocsQuery.class))); } public void testIllegalValues() { diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 184ee2759c15e..c258cce6c7c50 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -25,7 +25,6 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -371,13 +370,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws public void testMatchPhrasePrefixWithBoost() throws Exception { QueryShardContext context = createShardContext(); { - // field boost is applied on a single term query + // field boost is ignored on a single term query MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo"); Query query = builder.toQuery(context); - assertThat(query, instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) query).getBoost(), equalTo(4f)); - Query innerQuery = ((BoostQuery) query).getQuery(); - assertThat(innerQuery, instanceOf(MultiPhrasePrefixQuery.class)); + assertThat(query, instanceOf(MultiPhrasePrefixQuery.class)); } { diff --git a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 43c76f028e22e..27651e0da0de4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -91,7 +91,12 @@ protected MultiMatchQueryBuilder doCreateTestQueryBuilder() { // sets other parameters of the multi match query if (randomBoolean()) { - query.type(randomFrom(MultiMatchQueryBuilder.Type.values())); + if (fieldName.equals(STRING_FIELD_NAME)) { + query.type(randomFrom(MultiMatchQueryBuilder.Type.values())); + } else { + query.type(randomValueOtherThan(MultiMatchQueryBuilder.Type.PHRASE_PREFIX, + () -> randomFrom(MultiMatchQueryBuilder.Type.values()))); + } } if (randomBoolean()) { query.operator(randomFrom(Operator.values())); @@ -384,6 +389,11 @@ public void testDefaultField() throws Exception { ), 0.0f ); assertEquals(expected, query); + + context.getIndexSettings().updateIndexMetaData( + newIndexMeta("index", context.getIndexSettings().getSettings(), + Settings.builder().putNull("index.query.default_field").build()) + ); } public void testWithStopWords() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index baa0fed01bbf0..0eb6de7da252f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -1208,20 +1208,21 @@ public void testUnmappedFieldRewriteToMatchNoDocs() throws IOException { .field("unmapped_field") .lenient(true) .toQuery(createShardContext()); - assertEquals(new MatchNoDocsQuery(""), query); + assertEquals(new BooleanQuery.Builder().build(), query); // Unmapped prefix field query = new QueryStringQueryBuilder("unmapped_field:hello") .lenient(true) .toQuery(createShardContext()); - assertEquals(new MatchNoDocsQuery(""), query); + assertEquals(new BooleanQuery.Builder().build(), query); // Unmapped fields query = new QueryStringQueryBuilder("hello") .lenient(true) .field("unmapped_field") + 
.field("another_field") .toQuery(createShardContext()); - assertEquals(new MatchNoDocsQuery(""), query); + assertEquals(new BooleanQuery.Builder().build(), query); } public void testDefaultField() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 47db7d42d8cd0..4c59e25804a55 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -32,8 +32,8 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopTermsRewrite; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; @@ -42,6 +42,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.internal.SearchContext; @@ -55,6 +56,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.either; +import static org.hamcrest.CoreMatchers.startsWith; public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase { @Override @@ -68,6 +70,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws .field("type", "alias") .field("path", "prefix_field") .endObject() + .startObject("body") + .field("type", "text") + .endObject() .endObject().endObject().endObject(); mapperService.merge("_doc", @@ -85,23 +90,26 @@ protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query if (query instanceof SpanMatchNoDocsQuery) { return; } - if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - assertThat(query, instanceOf(SpanBoostQuery.class)); - SpanBoostQuery boostQuery = (SpanBoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(queryBuilder.innerQuery().boost())); - query = boostQuery.getQuery(); - } - assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); - SpanMultiTermQueryWrapper spanMultiTermQueryWrapper = (SpanMultiTermQueryWrapper) query; - Query multiTermQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext()); - if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - assertThat(multiTermQuery, instanceOf(BoostQuery.class)); - BoostQuery boostQuery = (BoostQuery) multiTermQuery; - multiTermQuery = boostQuery.getQuery(); + assertThat(query, either(instanceOf(SpanMultiTermQueryWrapper.class)).or(instanceOf(FieldMaskingSpanQuery.class))); + if (query instanceof SpanMultiTermQueryWrapper) { + SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; + Query innerQuery = queryBuilder.innerQuery().toQuery(context.getQueryShardContext()); + if (queryBuilder.innerQuery().boost() != AbstractQueryBuilder.DEFAULT_BOOST) { + assertThat(innerQuery, 
instanceOf(BoostQuery.class)); + BoostQuery boostQuery = (BoostQuery) innerQuery; + innerQuery = boostQuery.getQuery(); + } + assertThat(innerQuery, instanceOf(MultiTermQuery.class)); + MultiTermQuery multiQuery = (MultiTermQuery) innerQuery; + if (multiQuery.getRewriteMethod() instanceof TopTermsRewrite) { + assertThat(wrapper.getRewriteMethod(), instanceOf(SpanMultiTermQueryWrapper.TopTermsSpanBooleanQueryRewrite.class)); + } else { + assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); + } + } else if (query instanceof FieldMaskingSpanQuery) { + FieldMaskingSpanQuery mask = (FieldMaskingSpanQuery) query; + assertThat(mask.getMaskedQuery(), instanceOf(TermQuery.class)); } - assertThat(multiTermQuery, either(instanceOf(MultiTermQuery.class)).or(instanceOf(TermQuery.class))); - assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), - equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery) multiTermQuery).getWrappedQuery())); } public void testIllegalArgument() { @@ -168,11 +176,10 @@ public String fieldName() { */ public void testUnsupportedInnerQueryType() throws IOException { MultiTermQueryBuilder query = new TermMultiTermQueryBuilder(); - SpanMultiTermQueryBuilder spamMultiTermQuery = new SpanMultiTermQueryBuilder(query); + SpanMultiTermQueryBuilder spanMultiTermQuery = new SpanMultiTermQueryBuilder(query); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, - () -> spamMultiTermQuery.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("unsupported inner query generated by " + TermMultiTermQueryBuilder.class.getName() + - ", should be " + MultiTermQuery.class.getName())); + () -> spanMultiTermQuery.toQuery(createShardContext())); + assertThat(e.getMessage(), startsWith("unsupported inner query")); } public void testToQueryInnerSpanMultiTerm() throws IOException { @@ -184,50 +191,39 @@ public void testToQueryInnerSpanMultiTerm() throws IOException { public void testToQueryInnerTermQuery() throws IOException { String fieldName = randomFrom("prefix_field", "prefix_field_alias"); final QueryShardContext context = createShardContext(); - if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { - Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .toQuery(context); - assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); - FieldMaskingSpanQuery fieldSpanQuery = (FieldMaskingSpanQuery) query; - assertThat(fieldSpanQuery.getField(), equalTo("prefix_field")); - assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); - SpanTermQuery spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery(); - assertThat(spanTermQuery.getTerm().text(), equalTo("foo")); - - query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .boost(2.0f) - .toQuery(context); - assertThat(query, instanceOf(SpanBoostQuery.class)); - SpanBoostQuery boostQuery = (SpanBoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(2.0f)); - assertThat(boostQuery.getQuery(), instanceOf(FieldMaskingSpanQuery.class)); - fieldSpanQuery = (FieldMaskingSpanQuery) boostQuery.getQuery(); - assertThat(fieldSpanQuery.getField(), equalTo("prefix_field")); - assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); - spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery(); - assertThat(spanTermQuery.getTerm().text(), equalTo("foo")); - } else { - Query query = new SpanMultiTermQueryBuilder(new 
PrefixQueryBuilder(fieldName, "foo")) - .toQuery(context); + { + Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")).toQuery(context); + if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { + assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); + FieldMaskingSpanQuery fieldQuery = (FieldMaskingSpanQuery) query; + assertThat(fieldQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); + assertThat(fieldQuery.getField(), equalTo("prefix_field")); + SpanTermQuery termQuery = (SpanTermQuery) fieldQuery.getMaskedQuery(); + assertThat(termQuery.getTerm().field(), equalTo("prefix_field._index_prefix")); + assertThat(termQuery.getTerm().text(), equalTo("foo")); + } else { + assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); + SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; + assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); + PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); + assertThat(prefixQuery.getField(), equalTo("prefix_field")); + assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); + } + } + + { + Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "f")).toQuery(context); assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); + assertThat(wrapper.getField(), equalTo("prefix_field")); PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); assertThat(prefixQuery.getField(), equalTo("prefix_field")); - assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); - - query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder(fieldName, "foo")) - .boost(2.0f) - .toQuery(context); - assertThat(query, instanceOf(SpanBoostQuery.class)); - SpanBoostQuery boostQuery = (SpanBoostQuery) query; - assertThat(boostQuery.getBoost(), equalTo(2.0f)); - assertThat(boostQuery.getQuery(), instanceOf(SpanMultiTermQueryWrapper.class)); - wrapper = (SpanMultiTermQueryWrapper) boostQuery.getQuery(); - assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); - prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); - assertThat(prefixQuery.getField(), equalTo("prefix_field")); - assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); + assertThat(prefixQuery.getPrefix().text(), equalTo("f")); + assertThat(wrapper.getRewriteMethod(), instanceOf(SpanBooleanQueryRewriteWithMaxClause.class)); + SpanBooleanQueryRewriteWithMaxClause rewrite = (SpanBooleanQueryRewriteWithMaxClause) wrapper.getRewriteMethod(); + assertThat(rewrite.getMaxExpansions(), equalTo(BooleanQuery.getMaxClauseCount())); + assertTrue(rewrite.isHardLimit()); } } @@ -255,17 +251,13 @@ public void testFromJson() throws IOException { } public void testDefaultMaxRewriteBuilder() throws Exception { - Query query = QueryBuilders.spanMultiTermQueryBuilder(QueryBuilders.prefixQuery("foo", "b")). 
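The hunks above pin down the two rewrite paths left for span_multi once SpanBoostQuery
is gone: an inner multi-term query that carries an explicit top-N rewrite maps to
Lucene's TopTermsSpanBooleanQueryRewrite, everything else is bounded by the new
SpanBooleanQueryRewriteWithMaxClause; and on 6.4.0+ indices a prefix over an
index-prefixes field skips the wrapper entirely in favour of a FieldMaskingSpanQuery
on the ._index_prefix subfield. A minimal Lucene-level sketch of the top-terms case
(plain Lucene API only; the "body" field name is just the one this test maps):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.PrefixQuery;
    import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;

    class SpanRewriteSketch {
        static SpanMultiTermQueryWrapper<PrefixQuery> topTermsSpan() {
            // Wrap the multi-term query so it can be used as a span clause.
            PrefixQuery prefix = new PrefixQuery(new Term("body", "b"));
            SpanMultiTermQueryWrapper<PrefixQuery> span = new SpanMultiTermQueryWrapper<>(prefix);
            // An explicit top-N rewrite keeps at most the 2000 best terms,
            // mirroring the "top_terms_boost_2000" case asserted below.
            span.setRewriteMethod(new SpanMultiTermQueryWrapper.TopTermsSpanBooleanQueryRewrite(2000));
            return span;
        }
    }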
@@ -255,17 +251,13 @@ public void testFromJson() throws IOException {
     }
 
     public void testDefaultMaxRewriteBuilder() throws Exception {
-        Query query = QueryBuilders.spanMultiTermQueryBuilder(QueryBuilders.prefixQuery("foo", "b")).
-            toQuery(createShardContext());
-
-        if (query instanceof SpanBoostQuery) {
-            query = ((SpanBoostQuery)query).getQuery();
-        }
+        Query query = QueryBuilders.spanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "b"))
+            .toQuery(createShardContext());
 
         assertTrue(query instanceof SpanMultiTermQueryWrapper);
         if (query instanceof SpanMultiTermQueryWrapper) {
-            MultiTermQuery.RewriteMethod rewriteMethod = ((SpanMultiTermQueryWrapper)query).getRewriteMethod();
-            assertTrue(rewriteMethod instanceof SpanMultiTermQueryBuilder.TopTermSpanBooleanQueryRewriteWithMaxClause);
+            MultiTermQuery.RewriteMethod rewriteMethod = ((SpanMultiTermQueryWrapper) query).getRewriteMethod();
+            assertTrue(rewriteMethod instanceof SpanBooleanQueryRewriteWithMaxClause);
         }
     }
 
@@ -285,7 +277,6 @@ public void testTermExpansionExceptionOnSpanFailure() throws Exception {
                 Query query = queryBuilder.toQuery(createShardContext(reader));
                 RuntimeException exc = expectThrows(RuntimeException.class, () -> query.rewrite(reader));
                 assertThat(exc.getMessage(), containsString("maxClauseCount"));
-
             } finally {
                 BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount);
             }
@@ -296,17 +287,13 @@
 
     public void testTopNMultiTermsRewriteInsideSpan() throws Exception {
         Query query = QueryBuilders.spanMultiTermQueryBuilder(
-                QueryBuilders.prefixQuery("foo", "b").rewrite("top_terms_boost_2000")
+                QueryBuilders.prefixQuery("body", "b").rewrite("top_terms_boost_2000")
         ).toQuery(createShardContext());
 
-        if (query instanceof SpanBoostQuery) {
-            query = ((SpanBoostQuery)query).getQuery();
-        }
-
         assertTrue(query instanceof SpanMultiTermQueryWrapper);
         if (query instanceof SpanMultiTermQueryWrapper) {
             MultiTermQuery.RewriteMethod rewriteMethod = ((SpanMultiTermQueryWrapper)query).getRewriteMethod();
-            assertFalse(rewriteMethod instanceof SpanMultiTermQueryBuilder.TopTermSpanBooleanQueryRewriteWithMaxClause);
+            assertFalse(rewriteMethod instanceof SpanBooleanQueryRewriteWithMaxClause);
        }
    }
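Both rewrite tests above now target the mapped "body" text field that this patch adds
to the test mappings: an unmapped field short-circuits to SpanMatchNoDocsQuery before
any rewrite method is chosen, so the old "foo" field would no longer exercise the
wrapper. Roughly how the two cases are built (builder calls taken from the hunks
above; which rewrite method each one ends up with is exactly what the tests assert):

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.index.query.SpanMultiTermQueryBuilder;

    class SpanMultiBuilders {
        // Default path: expected to rewrite with SpanBooleanQueryRewriteWithMaxClause.
        static SpanMultiTermQueryBuilder defaultRewrite() {
            return QueryBuilders.spanMultiTermQueryBuilder(QueryBuilders.prefixQuery("body", "b"));
        }

        // Explicit top-N rewrite: expected to bypass the max-clause rewrite.
        static SpanMultiTermQueryBuilder topTermsRewrite() {
            return QueryBuilders.spanMultiTermQueryBuilder(
                QueryBuilders.prefixQuery("body", "b").rewrite("top_terms_boost_2000"));
        }
    }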
diff --git a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
index 1087bbbf9fd8f..58baadd83573d 100644
--- a/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
+++ b/server/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
@@ -27,7 +27,6 @@
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SynonymQuery;
@@ -44,7 +43,7 @@
 import org.elasticsearch.index.mapper.MockFieldMapper.FakeFieldType;
 import org.elasticsearch.index.query.MultiMatchQueryBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.search.MultiMatchQuery.FieldAndFieldType;
+import org.elasticsearch.index.search.MultiMatchQuery.FieldAndBoost;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.elasticsearch.test.MockKeywordPlugin;
@@ -105,7 +104,8 @@ public void testCrossFieldMultiMatchQuery() throws IOException {
         for (float tieBreaker : new float[] {0.0f, 0.5f}) {
             Query parsedQuery = multiMatchQuery("banon")
                 .field("name.first", 2)
-                .field("name.last", 3).field("foobar")
+                .field("name.last", 3)
+                .field("foobar")
                 .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
                 .tieBreaker(tieBreaker)
                 .toQuery(queryShardContext);
@@ -113,11 +113,7 @@
             Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery);
             Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2);
             Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3);
-            Query expected = new DisjunctionMaxQuery(
-                Arrays.asList(
-                    new MatchNoDocsQuery("unknown field foobar"),
-                    new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), tieBreaker)
-                ), tieBreaker);
+            Query expected = new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), tieBreaker);
             assertEquals(expected, rewrittenQuery);
         }
     }
@@ -133,7 +129,7 @@ public void testBlendTerms() {
         Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f);
         Query actual = MultiMatchQuery.blendTerm(
             indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null),
-            new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+            new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)));
         assertEquals(expected, actual);
     }
 
@@ -149,7 +145,7 @@ public void testBlendTermsWithFieldBoosts() {
         Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f);
         Query actual = MultiMatchQuery.blendTerm(
             indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null),
-            new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+            new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)));
         assertEquals(expected, actual);
     }
 
@@ -171,7 +167,7 @@ public Query termQuery(Object value, QueryShardContext context) {
         ), 1f);
         Query actual = MultiMatchQuery.blendTerm(
             indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null),
-            new BytesRef("baz"), null, 1f, true, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+            new BytesRef("baz"), null, 1f, true, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)));
         assertEquals(expected, actual);
     }
 
@@ -185,7 +181,7 @@ public Query termQuery(Object value, QueryShardContext context) {
         ft.setName("bar");
         expectThrows(IllegalArgumentException.class, () -> MultiMatchQuery.blendTerm(
             indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null),
-            new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft, 1)));
+            new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft, 1))));
     }
 
     public void testBlendNoTermQuery() {
@@ -209,7 +205,7 @@ public Query termQuery(Object value, QueryShardContext context) {
         ), 1.0f);
         Query actual = MultiMatchQuery.blendTerm(
             indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null),
-            new BytesRef("baz"), null, 1f, false, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3));
+            new BytesRef("baz"), null, 1f, false, Arrays.asList(new FieldAndBoost(ft1, 2), new FieldAndBoost(ft2, 3)));
         assertEquals(expected, actual);
     }
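With FieldAndFieldType replaced by FieldAndBoost and blendTerm taking a List instead
of varargs, the cross_fields expectation above also loses its MatchNoDocsQuery branch:
the unmapped "foobar" field is simply dropped rather than contributing a no-match
clause. The expected query, reconstructed from the assertions above (Lucene API only):

    import java.util.Arrays;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BoostQuery;
    import org.apache.lucene.search.DisjunctionMaxQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class CrossFieldsExpectation {
        // cross_fields for "banon" over name.first^2 and name.last^3 once the
        // unmapped field no longer adds a clause: a plain dismax of boosted terms.
        static Query expected(float tieBreaker) {
            Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2);
            Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3);
            return new DisjunctionMaxQuery(Arrays.asList(tq2, tq1), tieBreaker);
        }
    }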