diff --git a/.github/workflows/sql-test-and-build-workflow.yml b/.github/workflows/sql-test-and-build-workflow.yml index 90ada395e2..87256e6175 100644 --- a/.github/workflows/sql-test-and-build-workflow.yml +++ b/.github/workflows/sql-test-and-build-workflow.yml @@ -25,11 +25,11 @@ jobs: matrix: entry: - { os: ubuntu-latest, java: 11 } - - { os: windows-latest, java: 11, os_build_args: -x doctest -x integTest -x jacocoTestReport -PbuildPlatform=windows } - - { os: macos-latest, java: 11, os_build_args: -x doctest -x integTest -x jacocoTestReport } + - { os: windows-latest, java: 11, os_build_args: -x doctest -PbuildPlatform=windows } + - { os: macos-latest, java: 11} - { os: ubuntu-latest, java: 17 } - - { os: windows-latest, java: 17, os_build_args: -x doctest -x integTest -x jacocoTestReport -PbuildPlatform=windows } - - { os: macos-latest, java: 17, os_build_args: -x doctest -x integTest -x jacocoTestReport } + - { os: windows-latest, java: 17, os_build_args: -x doctest -PbuildPlatform=windows } + - { os: macos-latest, java: 17 } runs-on: ${{ matrix.entry.os }} steps: diff --git a/.gitignore b/.gitignore index 67e5bb07e9..46d984ce4f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ *.class +*.http .settings/ # Mobile Tools for Java (J2ME) .mtj.tmp/ @@ -33,7 +34,6 @@ gen/ # git mergetool artifact *.orig gen -*.tokens # Python */.venv @@ -46,3 +46,4 @@ gen /.prom.pid.lock .java-version +.worktrees \ No newline at end of file diff --git a/build-tools/sqlplugin-coverage.gradle b/build-tools/sqlplugin-coverage.gradle index 6f99ceac1f..1665637690 100644 --- a/build-tools/sqlplugin-coverage.gradle +++ b/build-tools/sqlplugin-coverage.gradle @@ -16,6 +16,7 @@ * cluster is stopped and dump it to a file. Luckily our current security policy seems to allow this. This will also probably * break if there are multiple nodes in the integTestCluster. But for now... it sorta works. */ +import org.apache.tools.ant.taskdefs.condition.Os apply plugin: 'jacoco' // Get gradle to generate the required jvm agent arg for us using a dummy tasks of type Test. 
Unfortunately Elastic's @@ -45,7 +46,12 @@ integTest.runner { } testClusters.integTest { - jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}" + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + // Replacing build with absolute path to fix the error "error opening zip file or JAR manifest missing : /build/tmp/expandedArchives/..../jacocoagent.jar" + jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('build',"${buildDir}") + } else { + jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('javaagent:','javaagent:/') + } systemProperty 'com.sun.management.jmxremote', "true" systemProperty 'com.sun.management.jmxremote.authenticate', "false" systemProperty 'com.sun.management.jmxremote.port', "7777" diff --git a/buildSrc/src/main/groovy/com/wiredforcode/spawn/KillProcessTask.groovy b/buildSrc/src/main/groovy/com/wiredforcode/spawn/KillProcessTask.groovy index 15c60b1aec..55bb76f1a1 100644 --- a/buildSrc/src/main/groovy/com/wiredforcode/spawn/KillProcessTask.groovy +++ b/buildSrc/src/main/groovy/com/wiredforcode/spawn/KillProcessTask.groovy @@ -1,5 +1,6 @@ package com.wiredforcode.gradle.spawn +import org.apache.tools.ant.taskdefs.condition.Os import org.gradle.api.tasks.TaskAction class KillProcessTask extends DefaultSpawnTask { @@ -12,7 +13,13 @@ class KillProcessTask extends DefaultSpawnTask { } def pid = pidFile.text - def process = "kill $pid".execute() + def killCommandLine + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + killCommandLine = Arrays.asList("taskkill", "/F", "/T", "/PID", "$pid") + } else { + killCommandLine = Arrays.asList("kill", "$pid") + } + def process = killCommandLine.execute() try { process.waitFor() diff --git a/buildSrc/src/main/groovy/com/wiredforcode/spawn/SpawnProcessTask.groovy b/buildSrc/src/main/groovy/com/wiredforcode/spawn/SpawnProcessTask.groovy index 1b3d0c4f00..a79bd13f60 100644 --- a/buildSrc/src/main/groovy/com/wiredforcode/spawn/SpawnProcessTask.groovy +++ b/buildSrc/src/main/groovy/com/wiredforcode/spawn/SpawnProcessTask.groovy @@ -95,9 +95,18 @@ class SpawnProcessTask extends DefaultSpawnTask { } private int extractPidFromProcess(Process process) { - def pidField = process.class.getDeclaredField('pid') - pidField.accessible = true - - return pidField.getInt(process) + def pid + try { + // works since java 9 + def pidMethod = process.class.getDeclaredMethod('pid') + pidMethod.setAccessible(true) + pid = pidMethod.invoke(process) + } catch (ignored) { + // fallback to UNIX-only implementation + def pidField = process.class.getDeclaredField('pid') + pidField.accessible = true + pid = pidField.getInt(process) + } + return pid } } diff --git a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java index e165fe1945..533977197f 100644 --- a/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java +++ b/core/src/main/java/org/opensearch/sql/analysis/Analyzer.java @@ -64,7 +64,7 @@ import org.opensearch.sql.data.model.ExprMissingValue; import org.opensearch.sql.data.type.ExprCoreType; import org.opensearch.sql.datasource.DataSourceService; -import org.opensearch.sql.datasource.model.DataSource; +import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.exception.SemanticCheckException; import org.opensearch.sql.expression.DSL; import org.opensearch.sql.expression.Expression; @@ -134,9 +134,9 @@ public LogicalPlan analyze(UnresolvedPlan unresolved, AnalysisContext context) { @Override public LogicalPlan visitRelation(Relation node, AnalysisContext context) { 
QualifiedName qualifiedName = node.getTableQualifiedName(); - Set allowedDataSourceNames = dataSourceService.getDataSources() + Set allowedDataSourceNames = dataSourceService.getDataSourceMetadataSet() .stream() - .map(DataSource::getName) + .map(DataSourceMetadata::getName) .collect(Collectors.toSet()); DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver = new DataSourceSchemaIdentifierNameResolver(qualifiedName.getParts(), @@ -182,9 +182,9 @@ public LogicalPlan visitRelationSubquery(RelationSubquery node, AnalysisContext @Override public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext context) { QualifiedName qualifiedName = node.getFunctionName(); - Set allowedDataSourceNames = dataSourceService.getDataSources() + Set allowedDataSourceNames = dataSourceService.getDataSourceMetadataSet() .stream() - .map(DataSource::getName) + .map(DataSourceMetadata::getName) .collect(Collectors.toSet()); DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver = new DataSourceSchemaIdentifierNameResolver(qualifiedName.getParts(), diff --git a/core/src/main/java/org/opensearch/sql/datasource/DataSourceMetadataStorage.java b/core/src/main/java/org/opensearch/sql/datasource/DataSourceMetadataStorage.java new file mode 100644 index 0000000000..85ffd0a1b3 --- /dev/null +++ b/core/src/main/java/org/opensearch/sql/datasource/DataSourceMetadataStorage.java @@ -0,0 +1,64 @@ +/* + * + * * Copyright OpenSearch Contributors + * * SPDX-License-Identifier: Apache-2.0 + * + */ + +package org.opensearch.sql.datasource; + +import java.util.List; +import java.util.Optional; +import javax.xml.crypto.Data; +import org.opensearch.sql.datasource.model.DataSource; +import org.opensearch.sql.datasource.model.DataSourceMetadata; + +/** + * Interface for DataSourceMetadata Storage + * which will be only used by DataSourceService for Storage. + */ +public interface DataSourceMetadataStorage { + + /** + * Returns all dataSource Metadata objects. The returned objects won't contain + * any of the credential info. + * + * @return list of {@link DataSourceMetadata}. + */ + List getDataSourceMetadata(); + + + /** + * Gets {@link DataSourceMetadata} corresponding to the + * datasourceName from underlying storage. + * + * @param datasourceName name of the {@link DataSource}. + */ + Optional getDataSourceMetadata(String datasourceName); + + + /** + * Stores {@link DataSourceMetadata} in underlying storage. + * + * @param dataSourceMetadata {@link DataSourceMetadata}. + */ + void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata); + + + /** + * Updates {@link DataSourceMetadata} in underlying storage. + * + * @param dataSourceMetadata {@link DataSourceMetadata}. + */ + void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata); + + + /** + * Deletes {@link DataSourceMetadata} corresponding to the + * datasourceName from underlying storage. + * + * @param datasourceName name of the {@link DataSource}. + */ + void deleteDataSourceMetadata(String datasourceName); + +} \ No newline at end of file diff --git a/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java b/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java index 37e6f8e085..e45b9cd9c8 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java +++ b/core/src/main/java/org/opensearch/sql/datasource/DataSourceService.java @@ -15,26 +15,49 @@ public interface DataSourceService { /** - * Returns all DataSource objects. 
+ * Returns {@link DataSource} corresponding to the DataSource name. * - * @return set of {@link DataSource}. + * @param dataSourceName Name of the {@link DataSource}. + * @return {@link DataSource}. */ - Set getDataSources(); + DataSource getDataSource(String dataSourceName); + /** - * Returns {@link DataSource} with corresponding to the DataSource name. + * Returns all dataSource Metadata objects. The returned objects won't contain + * any of the credential info. * - * @param dataSourceName Name of the {@link DataSource}. - * @return {@link DataSource}. + * @return set of {@link DataSourceMetadata}. */ - DataSource getDataSource(String dataSourceName); + Set getDataSourceMetadataSet(); /** * Register {@link DataSource} defined by {@link DataSourceMetadata}. * * @param metadatas list of {@link DataSourceMetadata}. */ - void addDataSource(DataSourceMetadata... metadatas); + void createDataSource(DataSourceMetadata... metadatas); + + /** + * Updates {@link DataSource} corresponding to dataSourceMetadata. + * + * @param dataSourceMetadata {@link DataSourceMetadata}. + */ + void updateDataSource(DataSourceMetadata dataSourceMetadata); + + + /** + * Deletes {@link DataSource} corresponding to the DataSource name. + * + * @param dataSourceName name of the {@link DataSource}. + */ + void deleteDataSource(String dataSourceName); + + /** + * This method is to bootstrap + * datasources during the startup of the plugin. + */ + void bootstrapDataSources(); /** * remove all the registered {@link DataSource}. diff --git a/core/src/main/java/org/opensearch/sql/datasource/DataSourceServiceImpl.java b/core/src/main/java/org/opensearch/sql/datasource/DataSourceServiceImpl.java index 274024e548..915e5aa287 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/DataSourceServiceImpl.java +++ b/core/src/main/java/org/opensearch/sql/datasource/DataSourceServiceImpl.java @@ -7,6 +7,8 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; +import com.google.common.collect.ImmutableMap; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -45,8 +47,12 @@ public DataSourceServiceImpl(Set dataSourceFactories) { } @Override - public Set getDataSources() { - return Set.copyOf(dataSourceMap.values()); + public Set getDataSourceMetadataSet() { + return dataSourceMap.values().stream() + .map(dataSource + -> new DataSourceMetadata(dataSource.getName(), + dataSource.getConnectorType(), ImmutableMap.of())) + .collect(Collectors.toSet()); } @Override @@ -59,7 +65,7 @@ public DataSource getDataSource(String dataSourceName) { } @Override - public void addDataSource(DataSourceMetadata... metadatas) { + public void createDataSource(DataSourceMetadata... metadatas) { for (DataSourceMetadata metadata : metadatas) { validateDataSourceMetaData(metadata); dataSourceMap.put( @@ -68,6 +74,21 @@ public void addDataSource(DataSourceMetadata... 
metadatas) { } } + @Override + public void updateDataSource(DataSourceMetadata dataSourceMetadata) { + throw new UnsupportedOperationException("will be supported in future"); + } + + @Override + public void deleteDataSource(String dataSourceName) { + throw new UnsupportedOperationException("will be supported in future"); + } + + @Override + public void bootstrapDataSources() { + throw new UnsupportedOperationException("will be supported in future"); + } + @Override public void clear() { dataSourceMap.clear(); diff --git a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java index f97d272bb9..854e489acd 100644 --- a/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java +++ b/core/src/main/java/org/opensearch/sql/datasource/model/DataSourceMetadata.java @@ -13,14 +13,19 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; import java.util.Map; +import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; import lombok.Setter; import org.opensearch.sql.datasource.DataSourceService; @JsonIgnoreProperties(ignoreUnknown = true) @Getter @Setter +@AllArgsConstructor +@NoArgsConstructor @EqualsAndHashCode public class DataSourceMetadata { @@ -39,10 +44,7 @@ public class DataSourceMetadata { * {@link DataSource} to {@link DataSourceService}. */ public static DataSourceMetadata defaultOpenSearchDataSourceMetadata() { - DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); - dataSourceMetadata.setName(DEFAULT_DATASOURCE_NAME); - dataSourceMetadata.setConnector(DataSourceType.OPENSEARCH); - dataSourceMetadata.setProperties(ImmutableMap.of()); - return dataSourceMetadata; + return new DataSourceMetadata(DEFAULT_DATASOURCE_NAME, + DataSourceType.OPENSEARCH, ImmutableMap.of()); } } diff --git a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java index 14cd09e162..60969f4d54 100644 --- a/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java +++ b/core/src/main/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScan.java @@ -18,7 +18,7 @@ import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.datasource.DataSourceService; -import org.opensearch.sql.datasource.model.DataSource; +import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.storage.TableScanOperator; /** @@ -47,14 +47,15 @@ public String explain() { @Override public void open() { List exprValues = new ArrayList<>(); - Set dataSources = dataSourceService.getDataSources(); - for (DataSource dataSource : dataSources) { + Set dataSourceMetadataSet + = dataSourceService.getDataSourceMetadataSet(); + for (DataSourceMetadata dataSourceMetadata : dataSourceMetadataSet) { exprValues.add( new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( "DATASOURCE_NAME", - ExprValueUtils.stringValue(dataSource.getName()), + ExprValueUtils.stringValue(dataSourceMetadata.getName()), "CONNECTOR_TYPE", - ExprValueUtils.stringValue(dataSource.getConnectorType().name()))))); + ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name()))))); } iterator = exprValues.iterator(); } diff --git 
a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java index f88f95da7e..1203232d33 100644 --- a/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java +++ b/core/src/test/java/org/opensearch/sql/analysis/AnalyzerTestBase.java @@ -11,13 +11,17 @@ import static org.opensearch.sql.data.type.ExprCoreType.LONG; import static org.opensearch.sql.data.type.ExprCoreType.STRING; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.commons.lang3.tuple.Pair; import org.opensearch.sql.DataSourceSchemaName; import org.opensearch.sql.analysis.symbol.Namespace; @@ -183,8 +187,10 @@ private class DefaultDataSourceService implements DataSourceService { @Override - public Set getDataSources() { - return ImmutableSet.of(opensearchDataSource, prometheusDataSource); + public Set getDataSourceMetadataSet() { + return Stream.of(opensearchDataSource, prometheusDataSource) + .map(ds -> new DataSourceMetadata(ds.getName(), + ds.getConnectorType(), ImmutableMap.of())).collect(Collectors.toSet()); } @Override @@ -197,10 +203,25 @@ public DataSource getDataSource(String dataSourceName) { } @Override - public void addDataSource(DataSourceMetadata... metadatas) { + public void createDataSource(DataSourceMetadata... metadatas) { throw new UnsupportedOperationException(); } + @Override + public void updateDataSource(DataSourceMetadata dataSourceMetadata) { + + } + + @Override + public void deleteDataSource(String dataSourceName) { + + } + + @Override + public void bootstrapDataSources() { + + } + @Override public void clear() { throw new UnsupportedOperationException(); diff --git a/core/src/test/java/org/opensearch/sql/datasource/DataSourceServiceImplTest.java b/core/src/test/java/org/opensearch/sql/datasource/DataSourceServiceImplTest.java index 2b40b32ee6..b623313c96 100644 --- a/core/src/test/java/org/opensearch/sql/datasource/DataSourceServiceImplTest.java +++ b/core/src/test/java/org/opensearch/sql/datasource/DataSourceServiceImplTest.java @@ -6,6 +6,7 @@ package org.opensearch.sql.datasource; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.lenient; @@ -65,7 +66,7 @@ public void clear() { @Test void getDataSourceSuccess() { - dataSourceService.addDataSource(DataSourceMetadata.defaultOpenSearchDataSourceMetadata()); + dataSourceService.createDataSource(DataSourceMetadata.defaultOpenSearchDataSourceMetadata()); assertEquals( new DataSource(DEFAULT_DATASOURCE_NAME, DataSourceType.OPENSEARCH, storageEngine), @@ -81,19 +82,21 @@ void getNotExistDataSourceShouldFail() { @Test void getAddDataSourcesShouldSuccess() { - assertEquals(0, dataSourceService.getDataSources().size()); + assertEquals(0, dataSourceService.getDataSourceMetadataSet().size()); - dataSourceService.addDataSource(metadata(NAME, DataSourceType.OPENSEARCH, ImmutableMap.of())); - assertEquals(1, dataSourceService.getDataSources().size()); + dataSourceService.createDataSource(metadata(NAME, + DataSourceType.OPENSEARCH, ImmutableMap.of())); + 
assertEquals(1, dataSourceService.getDataSourceMetadataSet().size()); } @Test void noDataSourceExistAfterClear() { - dataSourceService.addDataSource(metadata(NAME, DataSourceType.OPENSEARCH, ImmutableMap.of())); - assertEquals(1, dataSourceService.getDataSources().size()); + dataSourceService.createDataSource(metadata(NAME, + DataSourceType.OPENSEARCH, ImmutableMap.of())); + assertEquals(1, dataSourceService.getDataSourceMetadataSet().size()); dataSourceService.clear(); - assertEquals(0, dataSourceService.getDataSources().size()); + assertEquals(0, dataSourceService.getDataSourceMetadataSet().size()); } @Test @@ -102,7 +105,7 @@ void metaDataMissingNameShouldFail() { assertThrows( IllegalArgumentException.class, () -> - dataSourceService.addDataSource( + dataSourceService.createDataSource( metadata(null, DataSourceType.OPENSEARCH, ImmutableMap.of()))); assertEquals( "Missing Name Field from a DataSource. Name is a required parameter.", @@ -115,7 +118,7 @@ void metaDataHasIllegalDataSourceNameShouldFail() { assertThrows( IllegalArgumentException.class, () -> - dataSourceService.addDataSource( + dataSourceService.createDataSource( metadata("prometheus.test", DataSourceType.OPENSEARCH, ImmutableMap.of()))); assertEquals( "DataSource Name: prometheus.test contains illegal characters. " @@ -128,7 +131,8 @@ void metaDataMissingPropertiesShouldFail() { IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> dataSourceService.addDataSource(metadata(NAME, DataSourceType.OPENSEARCH, null))); + () -> dataSourceService.createDataSource(metadata(NAME, + DataSourceType.OPENSEARCH, null))); assertEquals( "Missing properties field in catalog configuration. Properties are required parameters.", exception.getMessage()); @@ -136,18 +140,41 @@ void metaDataMissingPropertiesShouldFail() { @Test void metaDataHasDuplicateNameShouldFail() { - dataSourceService.addDataSource(metadata(NAME, DataSourceType.OPENSEARCH, ImmutableMap.of())); - assertEquals(1, dataSourceService.getDataSources().size()); + dataSourceService.createDataSource(metadata(NAME, + DataSourceType.OPENSEARCH, ImmutableMap.of())); + assertEquals(1, dataSourceService.getDataSourceMetadataSet().size()); IllegalArgumentException exception = assertThrows( IllegalArgumentException.class, - () -> dataSourceService.addDataSource(metadata(NAME, DataSourceType.OPENSEARCH, null))); + () -> dataSourceService.createDataSource(metadata(NAME, + DataSourceType.OPENSEARCH, null))); assertEquals( String.format("Datasource name should be unique, Duplicate datasource found %s.", NAME), exception.getMessage()); } + @Test + void testUpdateDatasource() { + assertThrows( + UnsupportedOperationException.class, + () -> dataSourceService.updateDataSource(new DataSourceMetadata())); + } + + @Test + void testDeleteDatasource() { + assertThrows( + UnsupportedOperationException.class, + () -> dataSourceService.deleteDataSource(NAME)); + } + + @Test + void testLoadDatasource() { + assertThrows( + UnsupportedOperationException.class, + () -> dataSourceService.bootstrapDataSources()); + } + DataSourceMetadata metadata(String name, DataSourceType type, Map properties) { DataSourceMetadata dataSourceMetadata = new DataSourceMetadata(); dataSourceMetadata.setName(name); diff --git a/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java b/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java index 2f7188a248..1c45807245 100644 --- 
a/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java +++ b/core/src/test/java/org/opensearch/sql/planner/physical/datasource/DataSourceTableScanTest.java @@ -16,15 +16,18 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Set; +import java.util.stream.Collectors; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; import org.opensearch.sql.data.model.ExprTupleValue; +import org.opensearch.sql.data.model.ExprValue; import org.opensearch.sql.data.model.ExprValueUtils; import org.opensearch.sql.datasource.DataSourceService; import org.opensearch.sql.datasource.model.DataSource; +import org.opensearch.sql.datasource.model.DataSourceMetadata; import org.opensearch.sql.datasource.model.DataSourceType; import org.opensearch.sql.storage.StorageEngine; @@ -54,17 +57,26 @@ void testIterator() { Set dataSourceSet = new HashSet<>(); dataSourceSet.add(new DataSource("prometheus", DataSourceType.PROMETHEUS, storageEngine)); dataSourceSet.add(new DataSource("opensearch", DataSourceType.OPENSEARCH, storageEngine)); - when(dataSourceService.getDataSources()).thenReturn(dataSourceSet); + Set dataSourceMetadata = dataSourceSet.stream() + .map(dataSource -> new DataSourceMetadata(dataSource.getName(), + dataSource.getConnectorType(), ImmutableMap.of())).collect(Collectors.toSet()); + when(dataSourceService.getDataSourceMetadataSet()).thenReturn(dataSourceMetadata); assertFalse(dataSourceTableScan.hasNext()); dataSourceTableScan.open(); assertTrue(dataSourceTableScan.hasNext()); + Set exprTupleValues = new HashSet<>(); + while (dataSourceTableScan.hasNext()) { + exprTupleValues.add(dataSourceTableScan.next()); + } + + Set expectedExprTupleValues = new HashSet<>(); for (DataSource dataSource : dataSourceSet) { - assertEquals(new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( - "DATASOURCE_NAME", ExprValueUtils.stringValue(dataSource.getName()), - "CONNECTOR_TYPE", ExprValueUtils.stringValue(dataSource.getConnectorType().name())))), - dataSourceTableScan.next()); + expectedExprTupleValues.add(new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of( + "DATASOURCE_NAME", ExprValueUtils.stringValue(dataSource.getName()), + "CONNECTOR_TYPE", ExprValueUtils.stringValue(dataSource.getConnectorType().name()))))); } + assertEquals(expectedExprTupleValues, exprTupleValues); } } diff --git a/doctest/build.gradle b/doctest/build.gradle index ec403f32a3..7b52a1cfa4 100644 --- a/doctest/build.gradle +++ b/doctest/build.gradle @@ -56,7 +56,12 @@ task startPrometheus(type: SpawnProcessTask) { //evaluationDependsOn(':') task startOpenSearch(type: SpawnProcessTask) { - command "${path}/gradlew -p ${plugin_path} runRestTestCluster" + if( getOSFamilyType() == "windows") { + command "${path}\\gradlew.bat -p ${plugin_path} runRestTestCluster" + } + else { + command "${path}/gradlew -p ${plugin_path} runRestTestCluster" + } ready 'started' } @@ -94,12 +99,13 @@ task stopPrometheus() { } } } - -stopPrometheus.mustRunAfter startPrometheus +if(getOSFamilyType() != "windows") { + stopPrometheus.mustRunAfter startPrometheus + startOpenSearch.dependsOn startPrometheus + stopOpenSearch.finalizedBy stopPrometheus +} doctest.dependsOn startOpenSearch -startOpenSearch.dependsOn startPrometheus doctest.finalizedBy stopOpenSearch -stopOpenSearch.finalizedBy stopPrometheus check.dependsOn doctest 
clean.dependsOn(cleanBootstrap) diff --git a/integ-test/build.gradle b/integ-test/build.gradle index 911ee0b253..fb5cb85f32 100644 --- a/integ-test/build.gradle +++ b/integ-test/build.gradle @@ -22,7 +22,6 @@ * under the License. */ -import org.gradle.nativeplatform.platform.internal.DefaultNativePlatform import org.opensearch.gradle.test.RestIntegTestTask import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -151,9 +150,10 @@ stopPrometheus.mustRunAfter startPrometheus // Run PPL ITs and new, legacy and comparison SQL ITs with new SQL engine enabled integTest { dependsOn ':opensearch-sql-plugin:bundlePlugin' - dependsOn startPrometheus - finalizedBy stopPrometheus - + if(getOSFamilyType() != "windows") { + dependsOn startPrometheus + finalizedBy stopPrometheus + } systemProperty 'tests.security.manager', 'false' systemProperty('project.root', project.projectDir.absolutePath) @@ -178,6 +178,12 @@ integTest { } } + if(getOSFamilyType() == "windows") { + exclude 'org/opensearch/sql/ppl/PrometheusDataSourceCommandsIT.class' + exclude 'org/opensearch/sql/ppl/ShowDataSourcesCommandIT.class' + exclude 'org/opensearch/sql/ppl/InformationSchemaCommandIT.class' + } + exclude 'org/opensearch/sql/doctest/**/*IT.class' exclude 'org/opensearch/sql/correctness/**' diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java index 37c08742ab..430ae9a7b2 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/CsvFormatIT.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.util.Locale; import org.junit.Test; +import org.opensearch.sql.common.utils.StringUtils; public class CsvFormatIT extends PPLIntegTestCase { @@ -23,13 +24,13 @@ public void init() throws IOException { public void sanitizeTest() throws IOException { String result = executeCsvQuery( String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE)); - assertEquals( - "firstname,lastname\n" - + "'+Amber JOHnny,Duke Willmington+\n" - + "'-Hattie,Bond-\n" - + "'=Nanette,Bates=\n" - + "'@Dale,Adams@\n" - + "\",Elinor\",\"Ratliff,,,\"\n", + assertEquals(StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @@ -37,13 +38,13 @@ public void sanitizeTest() throws IOException { public void escapeSanitizeTest() throws IOException { String result = executeCsvQuery( String.format(Locale.ROOT, "source=%s | fields firstname, lastname", TEST_INDEX_BANK_CSV_SANITIZE), false); - assertEquals( - "firstname,lastname\n" - + "+Amber JOHnny,Duke Willmington+\n" - + "-Hattie,Bond-\n" - + "=Nanette,Bates=\n" - + "@Dale,Adams@\n" - + "\",Elinor\",\"Ratliff,,,\"\n", + assertEquals(StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java index 77fd910f35..23bea69a52 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/DescribeCommandIT.java @@ -6,19 +6,17 @@ package org.opensearch.sql.ppl; -import org.json.JSONObject; 
-import org.junit.jupiter.api.Test; -import org.opensearch.client.Request; -import org.opensearch.client.ResponseException; - -import java.io.IOException; - import static org.opensearch.sql.legacy.TestsConstants.TEST_INDEX_DOG; import static org.opensearch.sql.util.MatcherUtils.columnName; -import static org.opensearch.sql.util.MatcherUtils.rows; import static org.opensearch.sql.util.MatcherUtils.verifyColumn; import static org.opensearch.sql.util.MatcherUtils.verifyDataRows; +import java.io.IOException; +import org.json.JSONObject; +import org.junit.jupiter.api.Test; +import org.opensearch.client.Request; +import org.opensearch.client.ResponseException; + public class DescribeCommandIT extends PPLIntegTestCase { @Override @@ -88,25 +86,4 @@ public void describeCommandWithoutIndexShouldFailToParse() throws IOException { assertTrue(e.getMessage().contains("Failed to parse query due to offending symbol")); } } - - @Test - public void testDescribeCommandWithPrometheusCatalog() throws IOException { - JSONObject result = executeQuery("describe my_prometheus.prometheus_http_requests_total"); - verifyColumn( - result, - columnName("TABLE_CATALOG"), - columnName("TABLE_SCHEMA"), - columnName("TABLE_NAME"), - columnName("COLUMN_NAME"), - columnName("DATA_TYPE") - ); - verifyDataRows(result, - rows("my_prometheus", "default", "prometheus_http_requests_total", "handler", "keyword"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "code", "keyword"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "instance", "keyword"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "@value", "double"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "@timestamp", - "timestamp"), - rows("my_prometheus", "default", "prometheus_http_requests_total", "job", "keyword")); - } } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java index 3f9191c9c9..8e7c03777e 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/InformationSchemaCommandIT.java @@ -71,4 +71,27 @@ public void testTablesFromPrometheusCatalog() throws IOException { "counter", "", "Counter of HTTP requests.")); } + + // Moved this IT from DescribeCommandIT to segregate Datasource Integ Tests. 
+ @Test + public void testDescribeCommandWithPrometheusCatalog() throws IOException { + JSONObject result = executeQuery("describe my_prometheus.prometheus_http_requests_total"); + verifyColumn( + result, + columnName("TABLE_CATALOG"), + columnName("TABLE_SCHEMA"), + columnName("TABLE_NAME"), + columnName("COLUMN_NAME"), + columnName("DATA_TYPE") + ); + verifyDataRows(result, + rows("my_prometheus", "default", "prometheus_http_requests_total", "handler", "keyword"), + rows("my_prometheus", "default", "prometheus_http_requests_total", "code", "keyword"), + rows("my_prometheus", "default", "prometheus_http_requests_total", "instance", "keyword"), + rows("my_prometheus", "default", "prometheus_http_requests_total", "@value", "double"), + rows("my_prometheus", "default", "prometheus_http_requests_total", "@timestamp", + "timestamp"), + rows("my_prometheus", "default", "prometheus_http_requests_total", "job", "keyword")); + } + } diff --git a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java index 71988a8e31..2a882afbe1 100644 --- a/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/ppl/StandaloneIT.java @@ -79,7 +79,7 @@ public void init() { new ImmutableSet.Builder() .add(new OpenSearchDataSourceFactory(client, defaultSettings())) .build()); - dataSourceService.addDataSource(defaultOpenSearchDataSourceMetadata()); + dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); ModulesBuilder modules = new ModulesBuilder(); modules.add(new StandaloneModule(new InternalRestHighLevelClient(client()), defaultSettings(), dataSourceService)); diff --git a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java index 2c5005413e..782e2e22b5 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/CsvFormatIT.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.util.Locale; import org.junit.Test; +import org.opensearch.sql.common.utils.StringUtils; import org.opensearch.sql.legacy.SQLIntegTestCase; public class CsvFormatIT extends SQLIntegTestCase { @@ -24,13 +25,13 @@ public void init() throws IOException { public void sanitizeTest() { String result = executeQuery( String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), "csv"); - assertEquals( - "firstname,lastname\n" - + "'+Amber JOHnny,Duke Willmington+\n" - + "'-Hattie,Bond-\n" - + "'=Nanette,Bates=\n" - + "'@Dale,Adams@\n" - + "\",Elinor\",\"Ratliff,,,\"\n", + assertEquals(StringUtils.format( + "firstname,lastname%n" + + "'+Amber JOHnny,Duke Willmington+%n" + + "'-Hattie,Bond-%n" + + "'=Nanette,Bates=%n" + + "'@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } @@ -39,13 +40,13 @@ public void escapeSanitizeTest() { String result = executeQuery( String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_CSV_SANITIZE), "csv&sanitize=false"); - assertEquals( - "firstname,lastname\n" - + "+Amber JOHnny,Duke Willmington+\n" - + "-Hattie,Bond-\n" - + "=Nanette,Bates=\n" - + "@Dale,Adams@\n" - + "\",Elinor\",\"Ratliff,,,\"\n", + assertEquals(StringUtils.format( + "firstname,lastname%n" + + "+Amber JOHnny,Duke Willmington+%n" + + "-Hattie,Bond-%n" + + "=Nanette,Bates=%n" + + "@Dale,Adams@%n" + + "\",Elinor\",\"Ratliff,,,\"%n"), result); } } diff --git 
a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java index 43af66185e..8cba86647c 100644 --- a/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java +++ b/integ-test/src/test/java/org/opensearch/sql/sql/RawFormatIT.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.util.Locale; import org.junit.Test; +import org.opensearch.sql.common.utils.StringUtils; import org.opensearch.sql.legacy.SQLIntegTestCase; public class RawFormatIT extends SQLIntegTestCase { @@ -24,13 +25,13 @@ public void init() throws IOException { public void rawFormatWithPipeFieldTest() { String result = executeQuery( String.format(Locale.ROOT, "SELECT firstname, lastname FROM %s", TEST_INDEX_BANK_RAW_SANITIZE), "raw"); - assertEquals( - "firstname|lastname\n" - + "+Amber JOHnny|Duke Willmington+\n" - + "-Hattie|Bond-\n" - + "=Nanette|Bates=\n" - + "@Dale|Adams@\n" - + "@Elinor|\"Ratliff|||\"\n", + assertEquals(StringUtils.format( + "firstname|lastname%n" + + "+Amber JOHnny|Duke Willmington+%n" + + "-Hattie|Bond-%n" + + "=Nanette|Bates=%n" + + "@Dale|Adams@%n" + + "@Elinor|\"Ratliff|||\"%n"), result); } diff --git a/plugin/build.gradle b/plugin/build.gradle index f0bad12c2d..5239dd81b4 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -108,6 +108,8 @@ configurations.all { resolutionStrategy.force "com.squareup.okhttp3:okhttp:4.9.3" resolutionStrategy.force "joda-time:joda-time:2.10.12" resolutionStrategy.force "org.slf4j:slf4j-api:1.7.36" + resolutionStrategy.force "org.apache.httpcomponents:httpcore:4.4.15" + resolutionStrategy.force "org.apache.httpcomponents:httpclient:4.5.13" } compileJava { options.compilerArgs.addAll(["-processor", 'lombok.launch.AnnotationProcessorHider$AnnotationProcessor']) diff --git a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java index 2c401af352..e05cc8e842 100644 --- a/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java +++ b/plugin/src/main/java/org/opensearch/sql/plugin/SQLPlugin.java @@ -166,7 +166,7 @@ public Collection createComponents( new OpenSearchNodeClient(this.client), pluginSettings)) .add(new PrometheusStorageFactory()) .build()); - dataSourceService.addDataSource(defaultOpenSearchDataSourceMetadata()); + dataSourceService.createDataSource(defaultOpenSearchDataSourceMetadata()); loadDataSources(dataSourceService, clusterService.getSettings()); LocalClusterState.state().setClusterService(clusterService); LocalClusterState.state().setPluginSettings((OpenSearchSettings) pluginSettings); @@ -211,7 +211,7 @@ public ScriptEngine getScriptEngine(Settings settings, Collection metadataList = objectMapper.readValue(inputStream, new TypeReference<>() {}); - dataSourceService.addDataSource(metadataList.toArray(new DataSourceMetadata[0])); + dataSourceService.createDataSource(metadataList.toArray(new DataSourceMetadata[0])); } catch (IOException e) { LOG.error( "DataSource Configuration File uploaded is malformed. 
Verify and re-upload.", e); diff --git a/plugin/src/test/java/org/opensearch/sql/plugin/datasource/DataSourceMetaDataTest.java b/plugin/src/test/java/org/opensearch/sql/plugin/datasource/DataSourceMetaDataTest.java index e7e4c677b6..3ccb1cd403 100644 --- a/plugin/src/test/java/org/opensearch/sql/plugin/datasource/DataSourceMetaDataTest.java +++ b/plugin/src/test/java/org/opensearch/sql/plugin/datasource/DataSourceMetaDataTest.java @@ -96,7 +96,7 @@ public void testLoadConnectorsWithMalformedJson() { Settings settings = getDataSourceSettings("malformed_datasources.json"); loadConnectors(settings); - verify(dataSourceService, never()).addDataSource(any()); + verify(dataSourceService, never()).createDataSource(any()); } private Settings getDataSourceSettings(String filename) throws URISyntaxException, IOException { @@ -114,7 +114,7 @@ void loadConnectors(Settings settings) { void verifyAddDataSourceWithMetadata(List metadataList) { ArgumentCaptor metadataCaptor = ArgumentCaptor.forClass(DataSourceMetadata[].class); - verify(dataSourceService, times(1)).addDataSource(metadataCaptor.capture()); + verify(dataSourceService, times(1)).createDataSource(metadataCaptor.capture()); List actualValues = Arrays.asList(metadataCaptor.getValue()); assertEquals(metadataList.size(), actualValues.size()); assertEquals(metadataList, actualValues); diff --git a/prometheus/build.gradle b/prometheus/build.gradle index 7cf1e56085..ca70813e58 100644 --- a/prometheus/build.gradle +++ b/prometheus/build.gradle @@ -22,6 +22,8 @@ dependencies { implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-cbor', version: "${versions.jackson}" implementation group: 'com.squareup.okhttp3', name: 'okhttp', version: '4.9.3' implementation 'com.github.babbel:okhttp-aws-signer:1.0.2' + implementation group: 'com.amazonaws', name: 'aws-java-sdk-core', version: '1.12.1' + implementation group: 'com.amazonaws', name: 'aws-java-sdk-sts', version: '1.12.1' implementation group: 'org.json', name: 'json', version: '20180813' testImplementation('org.junit.jupiter:junit-jupiter:5.6.2') diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptor.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptor.java index f3d91c55a2..56e66431fd 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptor.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptor.java @@ -7,6 +7,9 @@ package org.opensearch.sql.prometheus.authinterceptors; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.babbel.mobile.android.commons.okhttpawssigner.OkHttpAwsV4Signer; import java.io.IOException; import java.time.ZoneId; @@ -16,29 +19,29 @@ import okhttp3.Interceptor; import okhttp3.Request; import okhttp3.Response; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; public class AwsSigningInterceptor implements Interceptor { private OkHttpAwsV4Signer okHttpAwsV4Signer; - private String accessKey; + private AWSCredentialsProvider awsCredentialsProvider; - private String secretKey; + private static final Logger LOG = LogManager.getLogger(); /** * AwsSigningInterceptor which intercepts http requests * and adds required headers for sigv4 authentication. * - * @param accessKey accessKey. 
- * @param secretKey secretKey. + * @param awsCredentialsProvider awsCredentialsProvider. * @param region region. * @param serviceName serviceName. */ - public AwsSigningInterceptor(@NonNull String accessKey, @NonNull String secretKey, + public AwsSigningInterceptor(@NonNull AWSCredentialsProvider awsCredentialsProvider, @NonNull String region, @NonNull String serviceName) { this.okHttpAwsV4Signer = new OkHttpAwsV4Signer(region, serviceName); - this.accessKey = accessKey; - this.secretKey = secretKey; + this.awsCredentialsProvider = awsCredentialsProvider; } @Override @@ -48,11 +51,21 @@ public Response intercept(Interceptor.Chain chain) throws IOException { DateTimeFormatter timestampFormat = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'") .withZone(ZoneId.of("GMT")); - Request newRequest = request.newBuilder() + + Request.Builder newRequestBuilder = request.newBuilder() .addHeader("x-amz-date", timestampFormat.format(ZonedDateTime.now())) - .addHeader("host", request.url().host()) - .build(); - Request signed = okHttpAwsV4Signer.sign(newRequest, accessKey, secretKey); + .addHeader("host", request.url().host()); + + AWSCredentials awsCredentials = awsCredentialsProvider.getCredentials(); + if (awsCredentialsProvider instanceof STSAssumeRoleSessionCredentialsProvider) { + newRequestBuilder.addHeader("x-amz-security-token", + ((STSAssumeRoleSessionCredentialsProvider) awsCredentialsProvider) + .getCredentials() + .getSessionToken()); + } + Request newRequest = newRequestBuilder.build(); + Request signed = okHttpAwsV4Signer.sign(newRequest, + awsCredentials.getAWSAccessKeyId(), awsCredentials.getAWSSecretKey()); return chain.proceed(signed); } diff --git a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java index 4e8b30af2f..d65f315c8a 100644 --- a/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java +++ b/prometheus/src/main/java/org/opensearch/sql/prometheus/storage/PrometheusStorageFactory.java @@ -7,6 +7,8 @@ package org.opensearch.sql.prometheus.storage; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import java.net.URI; import java.net.URISyntaxException; import java.util.HashSet; @@ -75,7 +77,8 @@ private OkHttpClient getHttpClient(Map config) { } else if (AuthenticationType.AWSSIGV4AUTH.equals(authenticationType)) { validateFieldsInConfig(config, Set.of(REGION, ACCESS_KEY, SECRET_KEY)); okHttpClient.addInterceptor(new AwsSigningInterceptor( - config.get(ACCESS_KEY), config.get(SECRET_KEY), + new AWSStaticCredentialsProvider( + new BasicAWSCredentials(config.get(ACCESS_KEY), config.get(SECRET_KEY))), config.get(REGION), "aps")); } else { throw new IllegalArgumentException( diff --git a/prometheus/src/test/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptorTest.java b/prometheus/src/test/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptorTest.java index a9224bf80f..5d5471edc0 100644 --- a/prometheus/src/test/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptorTest.java +++ b/prometheus/src/test/java/org/opensearch/sql/prometheus/authinterceptors/AwsSigningInterceptorTest.java @@ -10,6 +10,12 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSSessionCredentials; +import 
com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; +import com.amazonaws.auth.STSSessionCredentialsProvider; import lombok.SneakyThrows; import okhttp3.Interceptor; import okhttp3.Request; @@ -30,16 +36,19 @@ public class AwsSigningInterceptorTest { @Captor ArgumentCaptor requestArgumentCaptor; + @Mock + private STSAssumeRoleSessionCredentialsProvider stsAssumeRoleSessionCredentialsProvider; + @Test void testConstructors() { Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor(null, "secretKey", "us-east-1", "aps")); + new AwsSigningInterceptor(null, "us-east-1", "aps")); Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor("accessKey", null, "us-east-1", "aps")); + new AwsSigningInterceptor(getStaticAWSCredentialsProvider("accessKey", "secretKey"), null, + "aps")); Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor("accessKey", "secretKey", null, "aps")); - Assertions.assertThrows(NullPointerException.class, () -> - new AwsSigningInterceptor("accessKey", "secretKey", "us-east-1", null)); + new AwsSigningInterceptor(getStaticAWSCredentialsProvider("accessKey", "secretKey"), + "us-east-1", null)); } @Test @@ -49,7 +58,9 @@ void testIntercept() { .url("http://localhost:9090") .build()); AwsSigningInterceptor awsSigningInterceptor - = new AwsSigningInterceptor("testAccessKey", "testSecretKey", "us-east-1", "aps"); + = new AwsSigningInterceptor( + getStaticAWSCredentialsProvider("testAccessKey", "testSecretKey"), + "us-east-1", "aps"); awsSigningInterceptor.intercept(chain); verify(chain).proceed(requestArgumentCaptor.capture()); Request request = requestArgumentCaptor.getValue(); @@ -58,4 +69,51 @@ void testIntercept() { Assertions.assertNotNull(request.headers("host")); } + + @Test + @SneakyThrows + void testSTSCredentialsProviderInterceptor() { + when(chain.request()).thenReturn(new Request.Builder() + .url("http://localhost:9090") + .build()); + when(stsAssumeRoleSessionCredentialsProvider.getCredentials()) + .thenReturn(getAWSSessionCredentials()); + AwsSigningInterceptor awsSigningInterceptor + = new AwsSigningInterceptor(stsAssumeRoleSessionCredentialsProvider, + "us-east-1", "aps"); + awsSigningInterceptor.intercept(chain); + verify(chain).proceed(requestArgumentCaptor.capture()); + Request request = requestArgumentCaptor.getValue(); + Assertions.assertNotNull(request.headers("Authorization")); + Assertions.assertNotNull(request.headers("x-amz-date")); + Assertions.assertNotNull(request.headers("host")); + Assertions.assertEquals("session_token", + request.headers("x-amz-security-token").get(0)); + } + + + private AWSCredentialsProvider getStaticAWSCredentialsProvider(String accessKey, + String secretKey) { + return new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)); + } + + private AWSSessionCredentials getAWSSessionCredentials() { + return new AWSSessionCredentials() { + @Override + public String getSessionToken() { + return "session_token"; + } + + @Override + public String getAWSAccessKeyId() { + return "access_key"; + } + + @Override + public String getAWSSecretKey() { + return "secret_key"; + } + }; + } + } diff --git a/scripts/integtest.sh b/scripts/integtest.sh index 5b468b5356..59f4e88c76 100755 --- a/scripts/integtest.sh +++ b/scripts/integtest.sh @@ -95,18 +95,32 @@ fi USERNAME=`echo $CREDENTIAL | awk -F ':' '{print $1}'` 
PASSWORD=`echo $CREDENTIAL | awk -F ':' '{print $2}'` -OPENSEARCH_HOME=`ps -ef | grep -o "[o]pensearch.path.home=\S\+" | cut -d= -f2- | head -n1` - -curl -SL https://raw.githubusercontent.com/opensearch-project/sql/main/integ-test/src/test/resources/datasource/datasources.json -o "$OPENSEARCH_HOME"/datasources.json - -yes | $OPENSEARCH_HOME/bin/opensearch-keystore add-file plugins.query.federation.datasources.config $OPENSEARCH_HOME/datasources.json - -if [ $SECURITY_ENABLED == "true" ] +OS="`uname`" +#Cygwin or MinGW packages should be preinstalled in the windows. +#This command doesn't work without bash +#https://stackoverflow.com/questions/3466166/how-to-check-if-running-in-cygwin-mac-or-linux +#Operating System uname -s +#Mac OS X Darwin +#Cygwin 32-bit (Win-XP) CYGWIN_NT-5.1 +#Cygwin 32-bit (Win-7 32-bit) CYGWIN_NT-6.1 +#Cygwin 32-bit (Win-7 64-bit) CYGWIN_NT-6.1-WOW64 +#Cygwin 64-bit (Win-7 64-bit) CYGWIN_NT-6.1 +#MinGW (Windows 7 32-bit) MINGW32_NT-6.1 +#MinGW (Windows 10 64-bit) MINGW64_NT-10.0 +#Interix (Services for UNIX) Interix +#MSYS MSYS_NT-6.1 +#MSYS2 MSYS_NT-10.0-17763 +if ! [[ $OS =~ CYGWIN*|MINGW*|MINGW32*|MSYS* ]] then - curl -k --request POST --url https://$BIND_ADDRESS:$BIND_PORT/_nodes/reload_secure_settings --header 'content-type: application/json' --data '{"secure_settings_password":""}' --user $CREDENTIAL -else - curl --request POST --url http://$BIND_ADDRESS:$BIND_PORT/_nodes/reload_secure_settings --header 'content-type: application/json' --data '{"secure_settings_password":""}' + OPENSEARCH_HOME=`ps -ef | grep -o "[o]pensearch.path.home=\S\+" | cut -d= -f2- | head -n1` + curl -SL https://raw.githubusercontent.com/opensearch-project/sql/main/integ-test/src/test/resources/datasource/datasources.json -o "$OPENSEARCH_HOME"/datasources.json + yes | $OPENSEARCH_HOME/bin/opensearch-keystore add-file plugins.query.federation.datasources.config $OPENSEARCH_HOME/datasources.json + if [ $SECURITY_ENABLED == "true" ] + then + curl -k --request POST --url https://$BIND_ADDRESS:$BIND_PORT/_nodes/reload_secure_settings --header 'content-type: application/json' --data '{"secure_settings_password":""}' --user $CREDENTIAL + else + curl --request POST --url http://$BIND_ADDRESS:$BIND_PORT/_nodes/reload_secure_settings --header 'content-type: application/json' --data '{"secure_settings_password":""}' + fi fi - ./gradlew integTest -Dopensearch.version=$OPENSEARCH_VERSION -Dbuild.snapshot=$SNAPSHOT -Dtests.rest.cluster="$BIND_ADDRESS:$BIND_PORT" -Dtests.cluster="$BIND_ADDRESS:$BIND_PORT" -Dtests.clustername="opensearch-integrationtest" -Dhttps=$SECURITY_ENABLED -Duser=$USERNAME -Dpassword=$PASSWORD --console=plain
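
Note on the SpawnProcessTask.groovy hunk above: the task now tries the Process.pid() accessor (reflectively, since the Groovy plugin targets multiple JVMs) and only falls back to the private UNIX-only 'pid' field when that fails, which is what lets process spawning work on Windows. A minimal stand-alone Java sketch of the same idea, calling the Java 9+ API directly instead of through reflection; the class and method names here are illustrative and not part of the plugin:

    import java.lang.reflect.Field;

    public final class PidExtractor {

        private PidExtractor() {
        }

        /** Returns the native process id, mirroring the fallback logic in the Groovy task. */
        public static long extractPid(Process process) {
            try {
                // Available since Java 9 and supported on Windows as well as UNIX.
                return process.pid();
            } catch (UnsupportedOperationException e) {
                try {
                    // Pre-Java 9 fallback: UNIXProcess exposes a private int 'pid' field.
                    Field pidField = process.getClass().getDeclaredField("pid");
                    pidField.setAccessible(true);
                    return pidField.getInt(process);
                } catch (ReflectiveOperationException reflectionFailure) {
                    throw new IllegalStateException("Unable to determine process id", reflectionFailure);
                }
            }
        }
    }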
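
Note on the AwsSigningInterceptor hunks above: the constructor now takes an AWSCredentialsProvider instead of a raw access/secret key pair, and when the provider is an STSAssumeRoleSessionCredentialsProvider the interceptor also attaches the x-amz-security-token header before sigv4 signing. A hedged usage sketch, assuming the aws-java-sdk-sts dependency added in prometheus/build.gradle; the role ARN, session name, and class name below are illustrative only:

    import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
    import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
    import okhttp3.OkHttpClient;
    import org.opensearch.sql.prometheus.authinterceptors.AwsSigningInterceptor;

    public final class AssumeRoleSigningExample {

        private AssumeRoleSigningExample() {
        }

        public static OkHttpClient signedClient() {
            // Hypothetical role ARN and session name, for illustration only.
            STSAssumeRoleSessionCredentialsProvider credentialsProvider =
                new STSAssumeRoleSessionCredentialsProvider.Builder(
                        "arn:aws:iam::123456789012:role/prometheus-query-role", "sql-plugin-session")
                    .withStsClient(AWSSecurityTokenServiceClientBuilder.defaultClient())
                    .build();
            // The refactored interceptor accepts any credentials provider; with an STS
            // provider it adds the session token header before signing the request.
            return new OkHttpClient.Builder()
                .addInterceptor(new AwsSigningInterceptor(credentialsProvider, "us-east-1", "aps"))
                .build();
        }
    }

For the static-key path, an AWSStaticCredentialsProvider wrapping BasicAWSCredentials slots into the same constructor, which is exactly how PrometheusStorageFactory wires it in the hunk above.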