Merge branch 'main' into dependabot/gradle/modules/ingest-geoip/com.maxmind.geoip2-geoip2-4.2.1

Signed-off-by: gaobinlong <gbinlong@amazon.com>
gaobinlong authored Sep 26, 2024
2 parents aad8deb + ae22e3f commit f0ff3c9
Showing 36 changed files with 1,442 additions and 269 deletions.
CHANGELOG.md: 4 additions & 0 deletions
@@ -12,6 +12,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Implement WithFieldName interface in ValuesSourceAggregationBuilder & FieldSortBuilder ([#15916](https://github.com/opensearch-project/OpenSearch/pull/15916))
- Add successfulSearchShardIndices in searchRequestContext ([#15967](https://github.com/opensearch-project/OpenSearch/pull/15967))
- Remove identity-related feature flagged code from the RestController ([#15430](https://github.com/opensearch-project/OpenSearch/pull/15430))
- Add support for msearch API to pass search pipeline name ([#15923](https://github.com/opensearch-project/OpenSearch/pull/15923))

### Dependencies
- Bump `com.azure:azure-identity` from 1.13.0 to 1.13.2 ([#15578](https://github.com/opensearch-project/OpenSearch/pull/15578))
@@ -24,8 +25,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Update protobuf from 3.25.4 to 3.25.5 ([#16011](https://github.com/opensearch-project/OpenSearch/pull/16011))
- Bump `actions/github-script` from 5 to 7 ([#16039](https://github.com/opensearch-project/OpenSearch/pull/16039))
- Bump `com.maxmind.geoip2:geoip2` from 4.2.0 to 4.2.1 ([#16042](https://github.com/opensearch-project/OpenSearch/pull/16042))
- Bump `dnsjava:dnsjava` from 3.6.1 to 3.6.2 ([#16041](https://github.com/opensearch-project/OpenSearch/pull/16041))

### Changed
- Add support for docker compose v2 in TestFixturesPlugin ([#16049](https://github.com/opensearch-project/OpenSearch/pull/16049))


### Deprecated
@@ -36,6 +39,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix wildcard query containing escaped character ([#15737](https://github.com/opensearch-project/OpenSearch/pull/15737))
- Fix case-insensitive query on wildcard field ([#15882](https://github.com/opensearch-project/OpenSearch/pull/15882))
- Add validation for the search backpressure cancellation settings ([#15501](https://github.com/opensearch-project/OpenSearch/pull/15501))
- Fix search_as_you_type not supporting multi-fields ([#15988](https://github.com/opensearch-project/OpenSearch/pull/15988))
- Avoid infinite loop when `flat_object` field contains invalid token ([#15985](https://github.com/opensearch-project/OpenSearch/pull/15985))
- Fix infinite loop in nested agg ([#15931](https://github.com/opensearch-project/OpenSearch/pull/15931))

@@ -106,6 +106,7 @@ public DockerAvailability getDockerAvailability() {
Version version = null;
boolean isVersionHighEnough = false;
boolean isComposeAvailable = false;
boolean isComposeV2Available = false;

// Check if the Docker binary exists
final Optional<String> dockerBinary = getDockerPath();
@@ -129,6 +130,8 @@ public DockerAvailability getDockerAvailability() {
if (lastResult.isSuccess() && composePath.isPresent()) {
isComposeAvailable = runCommand(composePath.get(), "version").isSuccess();
}

isComposeV2Available = runCommand(dockerPath, "compose", "version").isSuccess();
}
}
}
@@ -138,6 +141,7 @@ public DockerAvailability getDockerAvailability() {
this.dockerAvailability = new DockerAvailability(
isAvailable,
isComposeAvailable,
isComposeV2Available,
isVersionHighEnough,
dockerPath,
version,
@@ -356,6 +360,11 @@ public static class DockerAvailability {
*/
public final boolean isComposeAvailable;

/**
* True if Docker Compose V2 (the docker compose plugin) is available.
*/
public final boolean isComposeV2Available;

/**
* True if the installed Docker version is &gt;= 17.05
*/
@@ -379,13 +388,15 @@ public static class DockerAvailability {
DockerAvailability(
boolean isAvailable,
boolean isComposeAvailable,
boolean isComposeV2Available,
boolean isVersionHighEnough,
String path,
Version version,
Result lastCommand
) {
this.isAvailable = isAvailable;
this.isComposeAvailable = isComposeAvailable;
this.isComposeV2Available = isComposeV2Available;
this.isVersionHighEnough = isVersionHighEnough;
this.path = path;
this.version = version;
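
For reference, the new isComposeV2Available flag is filled in by running `docker compose version` through the same runCommand helper used for the legacy probe. A minimal stand-alone sketch of that check using only ProcessBuilder (a hypothetical helper class, not part of this commit) might look like:

import java.io.IOException;

// Hypothetical stand-alone probe: Compose V2 ships as a plugin of the docker
// binary, so the check invokes "docker compose version" rather than looking for
// a separate docker-compose executable on the PATH.
final class ComposeV2Probe {

    static boolean isComposeV2Available(String dockerPath) {
        try {
            Process process = new ProcessBuilder(dockerPath, "compose", "version")
                .redirectErrorStream(true)
                .start();
            return process.waitFor() == 0; // exit code 0 => the plugin is installed
        } catch (IOException e) {
            return false;
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println("compose v2 available: " + isComposeV2Available("docker"));
    }
}

The only real difference from the V1 probe is that the V2 plugin is invoked through the docker binary itself, so no separate docker-compose executable needs to exist on the host.
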
@@ -171,7 +171,11 @@ public void execute(Task task) {
.findFirst();

composeExtension.getExecutable().set(dockerCompose.isPresent() ? dockerCompose.get() : "/usr/bin/docker");
composeExtension.getUseDockerComposeV2().set(false);
if (dockerSupport.get().getDockerAvailability().isComposeV2Available) {
composeExtension.getUseDockerComposeV2().set(true);
} else if (dockerSupport.get().getDockerAvailability().isComposeAvailable) {
composeExtension.getUseDockerComposeV2().set(false);
}

tasks.named("composeUp").configure(t -> {
// Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions
@@ -228,7 +232,8 @@ private void maybeSkipTask(Provider<DockerSupportService> dockerSupport, TaskPro

private void maybeSkipTask(Provider<DockerSupportService> dockerSupport, Task task) {
task.onlyIf(spec -> {
boolean isComposeAvailable = dockerSupport.get().getDockerAvailability().isComposeAvailable;
boolean isComposeAvailable = dockerSupport.get().getDockerAvailability().isComposeV2Available
|| dockerSupport.get().getDockerAvailability().isComposeAvailable;
if (isComposeAvailable == false) {
LOGGER.info("Task {} requires docker-compose but it is unavailable. Task will be skipped.", task.getPath());
}
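
The two hunks above amount to a simple precedence rule: use Compose V2 when the plugin is detected, fall back to the legacy docker-compose binary otherwise, and skip compose-backed tasks when neither is available. A rough sketch of that decision as an isolated helper (hypothetical names, not the plugin's actual API):

// Hypothetical helper capturing the precedence the plugin now applies: prefer the
// "docker compose" plugin, fall back to the legacy docker-compose binary, and let
// compose-backed tasks be skipped when neither is available.
final class ComposeSelection {

    enum Mode { V2_PLUGIN, V1_BINARY, NONE }

    static Mode select(boolean v2Available, boolean v1Available) {
        if (v2Available) {
            return Mode.V2_PLUGIN;
        }
        return v1Available ? Mode.V1_BINARY : Mode.NONE;
    }

    public static void main(String[] args) {
        System.out.println(select(true, true));   // V2_PLUGIN: V2 wins even if V1 is also present
        System.out.println(select(false, true));  // V1_BINARY
        System.out.println(select(false, false)); // NONE: the task would be skipped
    }
}

This ordering matters on hosts that ship only the Compose V2 plugin, where no docker-compose binary exists at all.
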
@@ -264,7 +264,15 @@ public SearchAsYouTypeFieldMapper build(Mapper.BuilderContext context) {
}
ft.setPrefixField(prefixFieldType);
ft.setShingleFields(shingleFieldTypes);
return new SearchAsYouTypeFieldMapper(name, ft, copyTo.build(), prefixFieldMapper, shingleFieldMappers, this);
return new SearchAsYouTypeFieldMapper(
name,
ft,
multiFieldsBuilder.build(this, context),
copyTo.build(),
prefixFieldMapper,
shingleFieldMappers,
this
);
}
}

@@ -623,12 +631,13 @@ public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRew
public SearchAsYouTypeFieldMapper(
String simpleName,
SearchAsYouTypeFieldType mappedFieldType,
MultiFields multiFields,
CopyTo copyTo,
PrefixFieldMapper prefixField,
ShingleFieldMapper[] shingleFields,
Builder builder
) {
super(simpleName, mappedFieldType, MultiFields.empty(), copyTo);
super(simpleName, mappedFieldType, multiFields, copyTo);
this.prefixField = prefixField;
this.shingleFields = shingleFields;
this.maxShingleSize = builder.maxShingleSize.getValue();
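
The behavioural change is in the super(...) call: the mapper previously discarded any multi-fields by passing MultiFields.empty(), and now forwards the ones built from the mapping. As a rough, dependency-free illustration (plain Java strings; the index name, field names, and query value are taken from the new YAML test added later in this commit), this is the mapping/query pair that only works once the fix is in place:

// Minimal sketch of the mapping and query that the fix enables. Before this change
// the "fields" block under a search_as_you_type field was dropped because the mapper
// was built with MultiFields.empty(), so "text.subField" never existed as a field.
public class SearchAsYouTypeMultiFieldExample {

    public static void main(String[] args) {
        // Body for: PUT /test_1
        String createIndexBody = "{ \"mappings\": { \"properties\": { \"text\": {"
            + " \"type\": \"search_as_you_type\","
            + " \"fields\": { \"subField\": { \"type\": \"keyword\" } } } } } }";

        // Body for: GET /test_1/_search (matches the indexed doc only after the fix)
        String termQuery = "{ \"query\": { \"term\": {"
            + " \"text.subField\": \"test search as you type\" } } }";

        System.out.println(createIndexBody);
        System.out.println(termQuery);
    }
}
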
@@ -298,6 +298,20 @@ private void assertMultiField(int shingleSize) throws IOException {
}
}

public void testSubField() throws IOException {
MapperService mapperService = createMapperService(
fieldMapping(
b -> b.field("type", "search_as_you_type")
.startObject("fields")
.startObject("subField")
.field("type", "keyword")
.endObject()
.endObject()
)
);
assertThat(mapperService.fieldType("field.subField"), instanceOf(KeywordFieldMapper.KeywordFieldType.class));
}

public void testIndexOptions() throws IOException {
DocumentMapper mapper = createDocumentMapper(
fieldMapping(b -> b.field("type", "search_as_you_type").field("index_options", "offsets"))
@@ -76,7 +76,7 @@ teardown:
---
"Invalid docs":
- skip:
version: "- 2.99.99"
version: "- 2.17.99"
reason: "parsing of these objects would infinite loop prior to 2.18"
# The following documents are invalid.
- do:
@@ -109,6 +109,19 @@ setup:

---
"Date histogram aggregation w/ shared field range test":
- do:
indices.create:
index: dhisto-agg-w-query
body:
settings:
number_of_shards: 1
number_of_replicas: 0
refresh_interval: -1
mappings:
properties:
date:
type: date

- do:
bulk:
refresh: true
@@ -127,6 +140,11 @@ setup:
- '{"index": {}}'
- '{"date": "2025-02-14"}'

- do:
indices.forcemerge:
index: dhisto-agg-w-query
max_num_segments: 1

- do:
search:
index: dhisto-agg-w-query
@@ -0,0 +1,75 @@
setup:
- do:
indices.create:
index: test_1
body:
mappings:
properties:
text:
type: search_as_you_type
fields:
subField:
type: keyword
- do:
index:
index: test_1
id: 1
body: { text: test search as you type }

- do:
indices.refresh:
index: [test_1]

---
teardown:
- do:
indices.delete:
index: test_1

# related issue: https://github.com/opensearch-project/OpenSearch/issues/5035
---
"Test search_as_you_type data type supports multi-fields":
- skip:
version: " - 2.17.99"
reason: "the bug was fixed since 2.18.0"

- do:
indices.get_mapping: {
index: test_1
}

- match: {test_1.mappings.properties.text.type: search_as_you_type}
- match: {test_1.mappings.properties.text.fields.subField.type: keyword}

- do:
search:
index: test_1
body:
query:
multi_match:
query: "test search"
type: "bool_prefix"

- match: {hits.total.value: 1}

- do:
search:
index: test_1
body:
query:
multi_match:
query: "test search"
type: "bool_prefix"
fields: ["text.subField"]

- match: {hits.total.value: 1}

- do:
search:
index: test_1
body:
query:
term:
text.subField: "test search as you type"

- match: {hits.total.value: 1}
@@ -235,7 +235,6 @@ public void testRemotePublicationDownloadStats() {
assertDataNodeDownloadStats(nodesStatsResponseDataNode);
}

@AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/15767")
public void testRemotePublicationDisabledByRollingRestart() throws Exception {
prepareCluster(3, 2, INDEX_NAME, 1, 2);
ensureStableCluster(5);
@@ -272,7 +271,6 @@ public void doAfterNodes(int n, Client client) {
assertTrue(
stats.getFullClusterStateReceivedCount() > 0 || stats.getCompatibleClusterStateDiffReceivedCount() > 0
);
assertEquals(0, stats.getIncompatibleClusterStateDiffReceivedCount());
} else {
DiscoveryStats stats = nodeStats.getDiscoveryStats();
assertEquals(0, stats.getPublishStats().getFullClusterStateReceivedCount());
@@ -297,7 +295,7 @@ public void doAfterNodes(int n, Client client) {
);
if (activeCMRestarted) {
assertNull(remoteState.getLastAcceptedState());
// assertNull(remoteState.getLastAcceptedManifest());
assertNull(remoteState.getLastAcceptedManifest());
} else {
ClusterState localState = registry.getPersistedState(PersistedStateRegistry.PersistedStateType.LOCAL)
.getLastAcceptedState();
@@ -326,7 +324,6 @@ public void doAfterNodes(int n, Client client) {
response.getNodes().forEach(nodeStats -> {
PublishClusterStateStats stats = nodeStats.getDiscoveryStats().getPublishStats();
assertTrue(stats.getFullClusterStateReceivedCount() > 0 || stats.getCompatibleClusterStateDiffReceivedCount() > 0);
assertEquals(0, stats.getIncompatibleClusterStateDiffReceivedCount());
});
NodesInfoResponse nodesInfoResponse = client().admin()
.cluster()
@@ -341,7 +338,7 @@ public void doAfterNodes(int n, Client client) {
PersistedStateRegistry registry = internalCluster().getInstance(PersistedStateRegistry.class, node);
CoordinationState.PersistedState remoteState = registry.getPersistedState(PersistedStateRegistry.PersistedStateType.REMOTE);
assertNull(remoteState.getLastAcceptedState());
// assertNull(remoteState.getLastAcceptedManifest());
assertNull(remoteState.getLastAcceptedManifest());
});
}

@@ -12,6 +12,7 @@
import org.opensearch.common.settings.Settings;
import org.opensearch.common.settings.SettingsException;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.repositories.blobstore.BlobStoreRepository;
import org.opensearch.test.InternalTestCluster;
import org.opensearch.test.OpenSearchIntegTestCase;

@@ -68,15 +69,17 @@ public void testNewIndexIsRemoteStoreBackedForRemoteStoreDirectionAndMixedMode()
assertRemoteStoreBackedIndex(indexName2);
}

@AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/15793")
public void testNewRestoredIndexIsRemoteStoreBackedForRemoteStoreDirectionAndMixedMode() throws Exception {
logger.info("Initialize cluster: gives non remote cluster manager");
initializeCluster(false);

logger.info("Add remote and non-remote nodes");
setClusterMode(MIXED.mode);
addRemote = false;
String nonRemoteNodeName = internalCluster().startNode();
Settings settings = Settings.builder()
.put(BlobStoreRepository.SNAPSHOT_SHARD_PATH_PREFIX_SETTING.getKey(), snapshotShardPathFixedPrefix ? "c" : "")
.build();
String nonRemoteNodeName = internalCluster().startNode(settings);
addRemote = true;
String remoteNodeName = internalCluster().startNode();
internalCluster().validateClusterFormed();
@@ -42,6 +42,7 @@
import org.opensearch.test.OpenSearchIntegTestCase;
import org.junit.Before;

import java.io.IOError;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -339,10 +340,11 @@ public void testFullClusterRestoreManifestFilePointsToInvalidIndexMetadataPathTh
for (UploadedIndexMetadata md : manifest.getIndices()) {
Files.move(segmentRepoPath.resolve(md.getUploadedFilename()), segmentRepoPath.resolve("cluster-state/"));
}
internalCluster().stopAllNodes();
} catch (IOException e) {
throw new RuntimeException(e);
}
assertThrows(IllegalStateException.class, () -> addNewNodes(dataNodeCount, clusterManagerNodeCount));
assertThrows(IOError.class, () -> internalCluster().client());
// Test is complete

// Starting a node without remote state to ensure test cleanup