Commit 633e762
Merge branch 'main' into retrievers
jdconrad committed Feb 2, 2024
2 parents 15d7e69 + 5408883 commit 633e762
Showing 235 changed files with 6,507 additions and 946 deletions.
@@ -98,6 +98,7 @@ public abstract class CheckForbiddenApisTask extends DefaultTask implements Patt
private File resourcesDir;

private boolean ignoreFailures = false;
private boolean ignoreMissingClasses = false;

@Input
@Optional
@@ -250,6 +251,15 @@ public void setIgnoreFailures(boolean ignoreFailures) {
this.ignoreFailures = ignoreFailures;
}

@Input
public boolean getIgnoreMissingClasses() {
return ignoreMissingClasses;
}

public void setIgnoreMissingClasses(boolean ignoreMissingClasses) {
this.ignoreMissingClasses = ignoreMissingClasses;
}

/**
* The default compiler target version used to expand references to bundled JDK signatures.
* E.g., if you use "jdk-deprecated", it will expand to this version.
@@ -378,6 +388,7 @@ public void checkForbidden() {
parameters.getSignatures().set(getSignatures());
parameters.getTargetCompatibility().set(getTargetCompatibility());
parameters.getIgnoreFailures().set(getIgnoreFailures());
parameters.getIgnoreMissingClasses().set(getIgnoreMissingClasses());
parameters.getSuccessMarker().set(getSuccessMarker());
parameters.getSignaturesFiles().from(getSignaturesFiles());
});
@@ -514,7 +525,9 @@ private URLClassLoader createClassLoader(FileCollection classpath, FileCollectio
@NotNull
private Checker createChecker(URLClassLoader urlLoader) {
final EnumSet<Checker.Option> options = EnumSet.noneOf(Checker.Option.class);
options.add(FAIL_ON_MISSING_CLASSES);
if (getParameters().getIgnoreMissingClasses().get() == false) {
options.add(FAIL_ON_MISSING_CLASSES);
}
if (getParameters().getIgnoreFailures().get() == false) {
options.add(FAIL_ON_VIOLATION);
}
@@ -573,6 +586,8 @@ interface Parameters extends WorkParameters {

Property<Boolean> getIgnoreFailures();

Property<Boolean> getIgnoreMissingClasses();

ListProperty<String> getSignatures();

}
@@ -59,6 +59,7 @@
"Infra/Scripting",
"Infra/Settings",
"Infra/Transport API",
"Infra/Metrics",
"Ingest",
"Ingest Node",
"Java High Level REST Client",
5 changes: 5 additions & 0 deletions docs/changelog/103973.yaml
@@ -0,0 +1,5 @@
pr: 103973
summary: Add stricter validation for api key expiration time
area: Security
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104320.yaml
@@ -0,0 +1,5 @@
pr: 104320
summary: Hot-reloadable LDAP bind password
area: Authentication
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104363.yaml
@@ -0,0 +1,5 @@
pr: 104363
summary: Apply windowing and chunking to long documents
area: Machine Learning
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104529.yaml
@@ -0,0 +1,5 @@
pr: 104529
summary: Add rest spec for Query User API
area: Client
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104648.yaml
@@ -0,0 +1,5 @@
pr: 104648
summary: "[Connector API] Implement update `index_name` action"
area: Application
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104750.yaml
@@ -0,0 +1,5 @@
pr: 104750
summary: "[Connectors API] Implement connector status update action"
area: Application
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104993.yaml
@@ -0,0 +1,5 @@
pr: 104993
summary: Support enrich remote mode
area: ES|QL
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/104996.yaml
@@ -0,0 +1,5 @@
pr: 104996
summary: "Enhancement: Metrics for Search Took Times using Action Listeners"
area: Search
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/105015.yaml
@@ -0,0 +1,5 @@
pr: 105015
summary: Modify name of threadpool metric for rejected
area: Infra/Metrics
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/105055.yaml
@@ -0,0 +1,5 @@
pr: 105055
summary: "Do not enable APM agent 'instrument', it's not required for manual tracing"
area: Infra/Core
type: bug
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/105066.yaml
@@ -0,0 +1,5 @@
pr: 105066
summary: Fix handling of `ml.config_version` node attribute for nodes with machine learning disabled
area: Machine Learning
type: bug
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/105070.yaml
@@ -0,0 +1,5 @@
pr: 105070
summary: Validate settings before reloading JWT shared secret
area: Authentication
type: bug
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/96235.yaml
@@ -0,0 +1,5 @@
pr: 96235
summary: Add `index.mapping.total_fields.ignore_dynamic_beyond_limit` setting to ignore dynamic fields when field limit is reached
area: Mapping
type: enhancement
issues: []
7 changes: 5 additions & 2 deletions docs/reference/mapping/fields/ignored-field.asciidoc
@@ -4,8 +4,11 @@
The `_ignored` field indexes and stores the names of every field in a document
that has been ignored when the document was indexed. This can, for example,
be the case when the field was malformed and <<ignore-malformed,`ignore_malformed`>>
was turned on, or when a `keyword` fields value exceeds its optional
<<ignore-above,`ignore_above`>> setting.
was turned on, when a `keyword` field's value exceeds its optional
<<ignore-above,`ignore_above`>> setting, or when
<<mapping-settings-limit,`index.mapping.total_fields.limit`>> has been reached and
<<mapping-settings-limit,`index.mapping.total_fields.ignore_dynamic_beyond_limit`>>
is set to `true`.
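
As an aside (not part of this commit), here is a minimal Python sketch of the `ignore_above` case described above. It assumes an unsecured local cluster at http://localhost:9200, the `requests` library, and a made-up index name `ignored-demo`:

import requests

ES = "http://localhost:9200"

# keyword field that ignores values longer than 5 characters
requests.put(f"{ES}/ignored-demo", json={
    "mappings": {"properties": {"code": {"type": "keyword", "ignore_above": 5}}}
})

# the value exceeds ignore_above, so "code" is skipped and its name is recorded in _ignored
requests.post(f"{ES}/ignored-demo/_doc/1?refresh=true", json={"code": "abcdefghij"})

# find documents that had at least one ignored field
hits = requests.post(f"{ES}/ignored-demo/_search", json={
    "query": {"exists": {"field": "_ignored"}}
}).json()["hits"]["hits"]
for hit in hits:
    print(hit["_id"], hit.get("_ignored"))  # expected: 1 ['code']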

This field is searchable with <<query-dsl-term-query,`term`>>,
<<query-dsl-terms-query,`terms`>> and <<query-dsl-exists-query,`exists`>>
11 changes: 10 additions & 1 deletion docs/reference/mapping/mapping-settings-limit.asciidoc
@@ -20,9 +20,18 @@ limits the maximum number of clauses in a query.
+
[TIP]
====
If your field mappings contain a large, arbitrary set of keys, consider using the <<flattened,flattened>> data type.
If your field mappings contain a large, arbitrary set of keys, consider using the <<flattened,flattened>> data type,
or setting the index setting `index.mapping.total_fields.ignore_dynamic_beyond_limit` to `true`.
====

`index.mapping.total_fields.ignore_dynamic_beyond_limit`::
This setting determines what happens when a dynamically mapped field would exceed the total fields limit.
When set to `false` (the default), the index request of the document that tries to add a dynamic field to the mapping will fail with the message `Limit of total fields [X] has been exceeded`.
When set to `true`, the index request will not fail.
Instead, fields that would exceed the limit are not added to the mapping, similar to <<dynamic, `dynamic: false`>>.
The fields that were not added to the mapping will be added to the <<mapping-ignored-field, `_ignored` field>>.
The default value is `false`.
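
As a hedged illustration (not from this commit), the sketch below exercises the setting with a deliberately tiny field limit. It assumes an unsecured local cluster at http://localhost:9200, the Python `requests` library, and a made-up index name `wide-docs`:

import requests

ES = "http://localhost:9200"

requests.put(f"{ES}/wide-docs", json={
    "settings": {
        "index.mapping.total_fields.limit": 2,
        "index.mapping.total_fields.ignore_dynamic_beyond_limit": True
    }
})

# more dynamic fields than the limit allows -- the index request still succeeds
resp = requests.post(f"{ES}/wide-docs/_doc/1?refresh=true",
                     json={"a": 1, "b": 2, "c": 3, "d": 4})
print(resp.status_code)  # expected 201, not a "Limit of total fields" error

# only fields up to the limit were added to the mapping
mapping = requests.get(f"{ES}/wide-docs/_mapping").json()
print(list(mapping["wide-docs"]["mappings"].get("properties", {})))

# the leftover field names are recorded in the _ignored metadata field
hits = requests.post(f"{ES}/wide-docs/_search", json={
    "query": {"exists": {"field": "_ignored"}}
}).json()["hits"]["hits"]
print([hit.get("_ignored") for hit in hits])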

`index.mapping.depth.limit`::
The maximum depth for a field, which is measured as the number of inner
objects. For instance, if all fields are defined at the root object level,
7 changes: 7 additions & 0 deletions docs/reference/mapping/params/dynamic.asciidoc
@@ -90,3 +90,10 @@ accepts the following parameters:
to the mapping, and new fields must be added explicitly.
`strict`:: If new fields are detected, an exception is thrown and the document
is rejected. New fields must be explicitly added to the mapping.

[[dynamic-field-limit]]
==== Behavior when reaching the field limit
Setting `dynamic` to either `true` or `runtime` will only add dynamic fields until <<mapping-settings-limit,`index.mapping.total_fields.limit`>> is reached.
By default, index requests for documents that would exceed the field limit will fail,
unless <<mapping-settings-limit,`index.mapping.total_fields.ignore_dynamic_beyond_limit`>> is set to `true`.
In that case, ignored fields are added to the <<mapping-ignored-field, `_ignored` metadata field>>.
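
For contrast, a hypothetical sketch of the default failure mode (again assuming a local cluster at http://localhost:9200 and an illustrative index name): without `ignore_dynamic_beyond_limit`, the same over-wide document is rejected once the limit is hit.

import requests

ES = "http://localhost:9200"

# same tiny limit, but ignore_dynamic_beyond_limit left at its default of false
requests.put(f"{ES}/strict-wide-docs", json={
    "settings": {"index.mapping.total_fields.limit": 2}
})

resp = requests.post(f"{ES}/strict-wide-docs/_doc/1",
                     json={"a": 1, "b": 2, "c": 3, "d": 4})
# expect a 4xx response whose error mentions
# "Limit of total fields [2] has been exceeded"
print(resp.status_code, resp.json()["error"])
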
@@ -41,13 +41,16 @@ timing out in the browser's Developer Tools Network tab.
doesn't normally cause problems unless it's combined with overriding
<<mapping-settings-limit,`index.mapping.total_fields.limit`>>. The
default `1000` limit is considered generous, though overriding to `10000`
doesn't cause noticable impact depending on use case. However, to give
doesn't cause noticeable impact depending on use case. However, to give
a bad example, overriding to `100000` and this limit being hit
by mapping totals would usually have strong performance implications.

If your index mapped fields expect to contain a large, arbitrary set of
keys, you may instead consider:

* Setting <<mapping-settings-limit,`index.mapping.total_fields.ignore_dynamic_beyond_limit`>> to `true`.
Instead of rejecting documents that exceed the field limit, this will ignore dynamic fields once the limit is reached.

* Using the <<flattened,flattened>> data type. Please note,
however, that flattened objects is link:https://github.com/elastic/kibana/issues/25820[not fully supported in {kib}] yet. For example, this could apply to sub-mappings like { `host.name` ,
`host.os`, `host.version` }. Desired fields are still accessed by
@@ -44,7 +44,6 @@ public void addClusterSettingsListeners(ClusterService clusterService, APMTeleme

clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_ENABLED_SETTING, enabled -> {
apmTracer.setEnabled(enabled);
this.setAgentSetting("instrument", Boolean.toString(enabled));
// The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to
// minimise its impact to a running Elasticsearch.
boolean recording = enabled || clusterSettings.get(TELEMETRY_METRICS_ENABLED_SETTING);
@@ -73,7 +72,6 @@ public void initAgentSystemProperties(Settings settings) {
boolean metrics = TELEMETRY_METRICS_ENABLED_SETTING.get(settings);

this.setAgentSetting("recording", Boolean.toString(tracing || metrics));
this.setAgentSetting("instrument", Boolean.toString(tracing));
// Apply values from the settings in the cluster state
APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting);
}
@@ -120,7 +118,8 @@ public void setAgentSetting(String key, String value) {

// Core:
// forbid 'enabled', must remain enabled to dynamically enable tracing / metrics
// forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'telemetry.tracing.enabled'
// forbid 'recording', controlled by 'telemetry.metrics.enabled' / 'telemetry.tracing.enabled'
// forbid 'instrument', automatic instrumentation can cause issues
"service_name",
"service_node_name",
// forbid 'service_version', forced by APMJvmOptions
@@ -60,13 +60,11 @@ public void testEnableTracing() {
apmAgentSettings.initAgentSystemProperties(update);

verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmAgentSettings).setAgentSetting("instrument", "true");
clearInvocations(apmAgentSettings);

Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build();
triggerUpdateConsumer(initial, update);
verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmAgentSettings).setAgentSetting("instrument", "true");
verify(apmTelemetryProvider.getTracer()).setEnabled(true);
}
}
@@ -76,7 +74,6 @@ public void testEnableTracingUsingLegacySetting() {
apmAgentSettings.initAgentSystemProperties(settings);

verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmAgentSettings).setAgentSetting("instrument", "true");
}

public void testEnableMetrics() {
@@ -90,7 +87,6 @@
apmAgentSettings.initAgentSystemProperties(update);

verify(apmAgentSettings).setAgentSetting("recording", "true");
verify(apmAgentSettings).setAgentSetting("instrument", Boolean.toString(tracingEnabled));
clearInvocations(apmAgentSettings);

Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build();
@@ -114,13 +110,11 @@ public void testDisableTracing() {
apmAgentSettings.initAgentSystemProperties(update);

verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled));
verify(apmAgentSettings).setAgentSetting("instrument", "false");
clearInvocations(apmAgentSettings);

Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build();
triggerUpdateConsumer(initial, update);
verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled));
verify(apmAgentSettings).setAgentSetting("instrument", "false");
verify(apmTelemetryProvider.getTracer()).setEnabled(false);
}
}
@@ -130,7 +124,6 @@ public void testDisableTracingUsingLegacySetting() {
apmAgentSettings.initAgentSystemProperties(settings);

verify(apmAgentSettings).setAgentSetting("recording", "false");
verify(apmAgentSettings).setAgentSetting("instrument", "false");
}

public void testDisableMetrics() {
@@ -144,7 +137,6 @@ public void testDisableMetrics() {
apmAgentSettings.initAgentSystemProperties(update);

verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(tracingEnabled));
verify(apmAgentSettings).setAgentSetting("instrument", Boolean.toString(tracingEnabled));
clearInvocations(apmAgentSettings);

Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build();
@@ -60,10 +60,8 @@
---
"Testing require_data_stream in bulk requests":
- skip:
version: "all"
reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/104774"
#version: " - 8.12.99"
#reason: "require_data_stream was introduced in 8.13.0"
version: " - 8.12.99"
reason: "require_data_stream was introduced in 8.13.0"
features: allowed_warnings

- do:
@@ -109,7 +107,7 @@

- do:
allowed_warnings:
- "index template [other-template] has index patterns [ds-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [other-template] will take precedence during new index creation"
- "index template [other-template] has index patterns [other-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [other-template] will take precedence during new index creation"
indices.put_index_template:
name: other-template
body:
@@ -1404,6 +1404,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception {

for (QueryBuilder query : queries) {
assertScrollResponsesAndHitCount(
client(),
TimeValue.timeValueSeconds(60),
prepareSearch("test").setScroll(TimeValue.timeValueSeconds(30)).setSize(1).addStoredField("_id").setQuery(query),
10,
@@ -61,7 +61,7 @@ public void testDCGAt() {
SearchHit[] hits = new SearchHit[6];
for (int i = 0; i < 6; i++) {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
hits[i] = new SearchHit(i, Integer.toString(i));
hits[i] = SearchHit.unpooled(i, Integer.toString(i));
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -111,7 +111,7 @@ public void testDCGAtSixMissingRatings() {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
}
}
hits[i] = new SearchHit(i, Integer.toString(i));
hits[i] = SearchHit.unpooled(i, Integer.toString(i));
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -168,7 +168,7 @@ public void testDCGAtFourMoreRatings() {
// only create four hits
SearchHit[] hits = new SearchHit[4];
for (int i = 0; i < 4; i++) {
hits[i] = new SearchHit(i, Integer.toString(i));
hits[i] = SearchHit.unpooled(i, Integer.toString(i));
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
@@ -104,7 +104,7 @@ private SearchHit[] createSearchHits(List<RatedDocument> rated, Integer[] releva
if (relevanceRatings[i] != null) {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
}
hits[i] = new SearchHit(i, Integer.toString(i));
hits[i] = SearchHit.unpooled(i, Integer.toString(i));
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
return hits;
@@ -190,7 +190,7 @@ public void testXContentParsingIsNotLenient() throws IOException {
private static SearchHit[] createSearchHits(int from, int to, String index) {
SearchHit[] hits = new SearchHit[to + 1 - from];
for (int i = from; i <= to; i++) {
hits[i] = new SearchHit(i, i + "");
hits[i] = SearchHit.unpooled(i, i + "");
hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
}
return hits;
@@ -101,7 +101,7 @@ public void testIgnoreUnlabeled() {
rated.add(createRatedDoc("test", "1", RELEVANT_RATING));
// add an unlabeled search hit
SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
searchHits[2] = new SearchHit(2, "2");
searchHits[2] = SearchHit.unpooled(2, "2");
searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));

EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);
@@ -120,7 +120,7 @@
public void testNoRatedDocs() throws Exception {
SearchHit[] hits = new SearchHit[5];
for (int i = 0; i < 5; i++) {
hits[i] = new SearchHit(i, i + "");
hits[i] = SearchHit.unpooled(i, i + "");
hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
@@ -248,7 +248,7 @@ private static PrecisionAtK mutate(PrecisionAtK original) {
private static SearchHit[] toSearchHits(List<RatedDocument> rated, String index) {
SearchHit[] hits = new SearchHit[rated.size()];
for (int i = 0; i < rated.size(); i++) {
hits[i] = new SearchHit(i, i + "");
hits[i] = SearchHit.unpooled(i, i + "");
hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
}
return hits;
@@ -226,7 +226,7 @@ public void testToXContent() throws IOException {
}

private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
SearchHit hit = new SearchHit(docId, docId + "");
SearchHit hit = SearchHit.unpooled(docId, docId + "");
hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
hit.score(1.0f);
return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());