diff --git a/.buildkite/.editorconfig b/.buildkite/.editorconfig
new file mode 100644
index 0000000000000..a08b2c5600ad8
--- /dev/null
+++ b/.buildkite/.editorconfig
@@ -0,0 +1,2 @@
+[*.ts]
+max_line_length = 120
diff --git a/.buildkite/.gitignore b/.buildkite/.gitignore
new file mode 100644
index 0000000000000..f81d56eaa35f6
--- /dev/null
+++ b/.buildkite/.gitignore
@@ -0,0 +1,169 @@
+# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
+
+# Logs
+
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+
+lib-cov
+
+# Coverage directory used by tools like istanbul
+
+coverage
+*.lcov
+
+# nyc test coverage
+
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+
+bower_components
+
+# node-waf configuration
+
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+
+build/Release
+
+# Dependency directories
+
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+
+web_modules/
+
+# TypeScript cache
+
+*.tsbuildinfo
+
+# Optional npm cache directory
+
+.npm
+
+# Optional eslint cache
+
+.eslintcache
+
+# Optional stylelint cache
+
+.stylelintcache
+
+# Microbundle cache
+
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+
+.node_repl_history
+
+# Output of 'npm pack'
+
+*.tgz
+
+# Yarn Integrity file
+
+.yarn-integrity
+
+# dotenv environment variable files
+
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+
+.cache
+.parcel-cache
+
+# Next.js build output
+
+.next
+out
+
+# Nuxt.js build / generate output
+
+.nuxt
+dist
+
+# Gatsby files
+
+.cache/
+
+# Comment in the public line in if your project uses Gatsby and not Next.js
+
+# https://nextjs.org/blog/next-9-1#public-directory-support
+
+# public
+
+# vuepress build output
+
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+
+.temp
+.cache
+
+# Docusaurus cache and generated files
+
+.docusaurus
+
+# Serverless directories
+
+.serverless/
+
+# FuseBox cache
+
+.fusebox/
+
+# DynamoDB Local files
+
+.dynamodb/
+
+# TernJS port file
+
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+
+.vscode-test
+
+# yarn v2
+
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
diff --git a/.buildkite/bun.lockb b/.buildkite/bun.lockb
new file mode 100755
index 0000000000000..54920b41d665c
Binary files /dev/null and b/.buildkite/bun.lockb differ
diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command
index 3d20e3fb73b8e..40fb970a76196 100644
--- a/.buildkite/hooks/pre-command
+++ b/.buildkite/hooks/pre-command
@@ -48,7 +48,7 @@ BUILDKITE_API_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/bu
export BUILDKITE_API_TOKEN
if [[ "${USE_LUCENE_SNAPSHOT_CREDS:-}" == "true" ]]; then
- data=$(.buildkite/scripts/lucene-snapshot/get-credentials.sh)
+ data=$(.buildkite/scripts/get-legacy-secret.sh aws-elastic/creds/lucene-snapshots)
AWS_ACCESS_KEY_ID=$(echo "$data" | jq -r .data.access_key)
export AWS_ACCESS_KEY_ID
@@ -70,12 +70,38 @@ if [[ "${USE_DRA_CREDENTIALS:-}" == "true" ]]; then
export DRA_VAULT_ADDR
fi
+source .buildkite/scripts/third-party-test-credentials.sh
+
if [[ "${USE_SNYK_CREDENTIALS:-}" == "true" ]]; then
SNYK_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/migrated/snyk)
export SNYK_TOKEN
fi
+if [[ "${USE_PROD_DOCKER_CREDENTIALS:-}" == "true" ]]; then
+ DOCKER_REGISTRY_USERNAME="$(vault read -field=username secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
+ export DOCKER_REGISTRY_USERNAME
+
+ DOCKER_REGISTRY_PASSWORD="$(vault read -field=password secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
+ export DOCKER_REGISTRY_PASSWORD
+fi
+
if [[ "$BUILDKITE_AGENT_META_DATA_PROVIDER" != *"k8s"* ]]; then
# Run in the background, while the job continues
nohup .buildkite/scripts/setup-monitoring.sh >/dev/null 2>&1 &
fi
+
+# Initialize the build scan and gobld annotations with empty/open tags
+# This ensures that they are collapsible when they get appended to
+if [[ "${BUILDKITE_LABEL:-}" == *"Pipeline upload"* ]]; then
+ cat << EOF | buildkite-agent annotate --context "gradle-build-scans" --style "info"
+
+
+Gradle build scan links
+EOF
+
+ cat << EOF | buildkite-agent annotate --context "ctx-gobld-metrics" --style "info"
+
+
+Agent information from gobld
+EOF
+fi
diff --git a/.buildkite/package.json b/.buildkite/package.json
new file mode 100644
index 0000000000000..c13d5f10fdf60
--- /dev/null
+++ b/.buildkite/package.json
@@ -0,0 +1,13 @@
+{
+ "name": "buildkite-pipelines",
+ "module": "index.ts",
+ "type": "module",
+ "devDependencies": {
+ "@types/node": "^20.6.0",
+ "bun-types": "latest",
+ "yaml": "^2.3.2"
+ },
+ "peerDependencies": {
+ "typescript": "^5.0.0"
+ }
+}
diff --git a/.buildkite/pipelines/periodic-packaging.bwc.template.yml b/.buildkite/pipelines/periodic-packaging.bwc.template.yml
index 0ec7721381d07..b06bc80d3535d 100644
--- a/.buildkite/pipelines/periodic-packaging.bwc.template.yml
+++ b/.buildkite/pipelines/periodic-packaging.bwc.template.yml
@@ -1,5 +1,5 @@
- label: "{{matrix.image}} / $BWC_VERSION / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION
timeout_in_minutes: 300
matrix:
setup:
diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml
index 1f1852639e997..1c626ffc53bfe 100644
--- a/.buildkite/pipelines/periodic-packaging.template.yml
+++ b/.buildkite/pipelines/periodic-packaging.template.yml
@@ -2,7 +2,7 @@ steps:
- group: packaging-tests-unix
steps:
- label: "{{matrix.image}} / packaging-tests-unix"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructivePackagingTest
+ command: ./.ci/scripts/packaging-test.sh destructivePackagingTest
timeout_in_minutes: 300
matrix:
setup:
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index ce0746a5726cc..6a2492fb03ef9 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -3,7 +3,7 @@ steps:
- group: packaging-tests-unix
steps:
- label: "{{matrix.image}} / packaging-tests-unix"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructivePackagingTest
+ command: ./.ci/scripts/packaging-test.sh destructivePackagingTest
timeout_in_minutes: 300
matrix:
setup:
@@ -33,7 +33,7 @@ steps:
- group: packaging-tests-upgrade
steps:
- label: "{{matrix.image}} / 7.0.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.0
timeout_in_minutes: 300
matrix:
setup:
@@ -49,7 +49,7 @@ steps:
BWC_VERSION: 7.0.0
- label: "{{matrix.image}} / 7.0.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.1
timeout_in_minutes: 300
matrix:
setup:
@@ -65,7 +65,7 @@ steps:
BWC_VERSION: 7.0.1
- label: "{{matrix.image}} / 7.1.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.0
timeout_in_minutes: 300
matrix:
setup:
@@ -81,7 +81,7 @@ steps:
BWC_VERSION: 7.1.0
- label: "{{matrix.image}} / 7.1.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.1
timeout_in_minutes: 300
matrix:
setup:
@@ -97,7 +97,7 @@ steps:
BWC_VERSION: 7.1.1
- label: "{{matrix.image}} / 7.2.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.0
timeout_in_minutes: 300
matrix:
setup:
@@ -113,7 +113,7 @@ steps:
BWC_VERSION: 7.2.0
- label: "{{matrix.image}} / 7.2.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.1
timeout_in_minutes: 300
matrix:
setup:
@@ -129,7 +129,7 @@ steps:
BWC_VERSION: 7.2.1
- label: "{{matrix.image}} / 7.3.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.0
timeout_in_minutes: 300
matrix:
setup:
@@ -145,7 +145,7 @@ steps:
BWC_VERSION: 7.3.0
- label: "{{matrix.image}} / 7.3.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.1
timeout_in_minutes: 300
matrix:
setup:
@@ -161,7 +161,7 @@ steps:
BWC_VERSION: 7.3.1
- label: "{{matrix.image}} / 7.3.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.2
timeout_in_minutes: 300
matrix:
setup:
@@ -177,7 +177,7 @@ steps:
BWC_VERSION: 7.3.2
- label: "{{matrix.image}} / 7.4.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.0
timeout_in_minutes: 300
matrix:
setup:
@@ -193,7 +193,7 @@ steps:
BWC_VERSION: 7.4.0
- label: "{{matrix.image}} / 7.4.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.1
timeout_in_minutes: 300
matrix:
setup:
@@ -209,7 +209,7 @@ steps:
BWC_VERSION: 7.4.1
- label: "{{matrix.image}} / 7.4.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.2
timeout_in_minutes: 300
matrix:
setup:
@@ -225,7 +225,7 @@ steps:
BWC_VERSION: 7.4.2
- label: "{{matrix.image}} / 7.5.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.0
timeout_in_minutes: 300
matrix:
setup:
@@ -241,7 +241,7 @@ steps:
BWC_VERSION: 7.5.0
- label: "{{matrix.image}} / 7.5.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.1
timeout_in_minutes: 300
matrix:
setup:
@@ -257,7 +257,7 @@ steps:
BWC_VERSION: 7.5.1
- label: "{{matrix.image}} / 7.5.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.2
timeout_in_minutes: 300
matrix:
setup:
@@ -273,7 +273,7 @@ steps:
BWC_VERSION: 7.5.2
- label: "{{matrix.image}} / 7.6.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.0
timeout_in_minutes: 300
matrix:
setup:
@@ -289,7 +289,7 @@ steps:
BWC_VERSION: 7.6.0
- label: "{{matrix.image}} / 7.6.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.1
timeout_in_minutes: 300
matrix:
setup:
@@ -305,7 +305,7 @@ steps:
BWC_VERSION: 7.6.1
- label: "{{matrix.image}} / 7.6.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.2
timeout_in_minutes: 300
matrix:
setup:
@@ -321,7 +321,7 @@ steps:
BWC_VERSION: 7.6.2
- label: "{{matrix.image}} / 7.7.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.0
timeout_in_minutes: 300
matrix:
setup:
@@ -337,7 +337,7 @@ steps:
BWC_VERSION: 7.7.0
- label: "{{matrix.image}} / 7.7.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.1
timeout_in_minutes: 300
matrix:
setup:
@@ -353,7 +353,7 @@ steps:
BWC_VERSION: 7.7.1
- label: "{{matrix.image}} / 7.8.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.0
timeout_in_minutes: 300
matrix:
setup:
@@ -369,7 +369,7 @@ steps:
BWC_VERSION: 7.8.0
- label: "{{matrix.image}} / 7.8.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.1
timeout_in_minutes: 300
matrix:
setup:
@@ -385,7 +385,7 @@ steps:
BWC_VERSION: 7.8.1
- label: "{{matrix.image}} / 7.9.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.0
timeout_in_minutes: 300
matrix:
setup:
@@ -401,7 +401,7 @@ steps:
BWC_VERSION: 7.9.0
- label: "{{matrix.image}} / 7.9.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.1
timeout_in_minutes: 300
matrix:
setup:
@@ -417,7 +417,7 @@ steps:
BWC_VERSION: 7.9.1
- label: "{{matrix.image}} / 7.9.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.2
timeout_in_minutes: 300
matrix:
setup:
@@ -433,7 +433,7 @@ steps:
BWC_VERSION: 7.9.2
- label: "{{matrix.image}} / 7.9.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.3
timeout_in_minutes: 300
matrix:
setup:
@@ -449,7 +449,7 @@ steps:
BWC_VERSION: 7.9.3
- label: "{{matrix.image}} / 7.10.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.0
timeout_in_minutes: 300
matrix:
setup:
@@ -465,7 +465,7 @@ steps:
BWC_VERSION: 7.10.0
- label: "{{matrix.image}} / 7.10.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.1
timeout_in_minutes: 300
matrix:
setup:
@@ -481,7 +481,7 @@ steps:
BWC_VERSION: 7.10.1
- label: "{{matrix.image}} / 7.10.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.2
timeout_in_minutes: 300
matrix:
setup:
@@ -497,7 +497,7 @@ steps:
BWC_VERSION: 7.10.2
- label: "{{matrix.image}} / 7.11.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.0
timeout_in_minutes: 300
matrix:
setup:
@@ -513,7 +513,7 @@ steps:
BWC_VERSION: 7.11.0
- label: "{{matrix.image}} / 7.11.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.1
timeout_in_minutes: 300
matrix:
setup:
@@ -529,7 +529,7 @@ steps:
BWC_VERSION: 7.11.1
- label: "{{matrix.image}} / 7.11.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.2
timeout_in_minutes: 300
matrix:
setup:
@@ -545,7 +545,7 @@ steps:
BWC_VERSION: 7.11.2
- label: "{{matrix.image}} / 7.12.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.0
timeout_in_minutes: 300
matrix:
setup:
@@ -561,7 +561,7 @@ steps:
BWC_VERSION: 7.12.0
- label: "{{matrix.image}} / 7.12.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.1
timeout_in_minutes: 300
matrix:
setup:
@@ -577,7 +577,7 @@ steps:
BWC_VERSION: 7.12.1
- label: "{{matrix.image}} / 7.13.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.0
timeout_in_minutes: 300
matrix:
setup:
@@ -593,7 +593,7 @@ steps:
BWC_VERSION: 7.13.0
- label: "{{matrix.image}} / 7.13.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.1
timeout_in_minutes: 300
matrix:
setup:
@@ -609,7 +609,7 @@ steps:
BWC_VERSION: 7.13.1
- label: "{{matrix.image}} / 7.13.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.2
timeout_in_minutes: 300
matrix:
setup:
@@ -625,7 +625,7 @@ steps:
BWC_VERSION: 7.13.2
- label: "{{matrix.image}} / 7.13.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.3
timeout_in_minutes: 300
matrix:
setup:
@@ -641,7 +641,7 @@ steps:
BWC_VERSION: 7.13.3
- label: "{{matrix.image}} / 7.13.4 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.4
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.4
timeout_in_minutes: 300
matrix:
setup:
@@ -657,7 +657,7 @@ steps:
BWC_VERSION: 7.13.4
- label: "{{matrix.image}} / 7.14.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.0
timeout_in_minutes: 300
matrix:
setup:
@@ -673,7 +673,7 @@ steps:
BWC_VERSION: 7.14.0
- label: "{{matrix.image}} / 7.14.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.1
timeout_in_minutes: 300
matrix:
setup:
@@ -689,7 +689,7 @@ steps:
BWC_VERSION: 7.14.1
- label: "{{matrix.image}} / 7.14.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.2
timeout_in_minutes: 300
matrix:
setup:
@@ -705,7 +705,7 @@ steps:
BWC_VERSION: 7.14.2
- label: "{{matrix.image}} / 7.15.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.0
timeout_in_minutes: 300
matrix:
setup:
@@ -721,7 +721,7 @@ steps:
BWC_VERSION: 7.15.0
- label: "{{matrix.image}} / 7.15.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.1
timeout_in_minutes: 300
matrix:
setup:
@@ -737,7 +737,7 @@ steps:
BWC_VERSION: 7.15.1
- label: "{{matrix.image}} / 7.15.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.2
timeout_in_minutes: 300
matrix:
setup:
@@ -753,7 +753,7 @@ steps:
BWC_VERSION: 7.15.2
- label: "{{matrix.image}} / 7.16.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.0
timeout_in_minutes: 300
matrix:
setup:
@@ -769,7 +769,7 @@ steps:
BWC_VERSION: 7.16.0
- label: "{{matrix.image}} / 7.16.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.1
timeout_in_minutes: 300
matrix:
setup:
@@ -785,7 +785,7 @@ steps:
BWC_VERSION: 7.16.1
- label: "{{matrix.image}} / 7.16.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.2
timeout_in_minutes: 300
matrix:
setup:
@@ -801,7 +801,7 @@ steps:
BWC_VERSION: 7.16.2
- label: "{{matrix.image}} / 7.16.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.3
timeout_in_minutes: 300
matrix:
setup:
@@ -817,7 +817,7 @@ steps:
BWC_VERSION: 7.16.3
- label: "{{matrix.image}} / 7.17.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.0
timeout_in_minutes: 300
matrix:
setup:
@@ -833,7 +833,7 @@ steps:
BWC_VERSION: 7.17.0
- label: "{{matrix.image}} / 7.17.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.1
timeout_in_minutes: 300
matrix:
setup:
@@ -849,7 +849,7 @@ steps:
BWC_VERSION: 7.17.1
- label: "{{matrix.image}} / 7.17.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.2
timeout_in_minutes: 300
matrix:
setup:
@@ -865,7 +865,7 @@ steps:
BWC_VERSION: 7.17.2
- label: "{{matrix.image}} / 7.17.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.3
timeout_in_minutes: 300
matrix:
setup:
@@ -881,7 +881,7 @@ steps:
BWC_VERSION: 7.17.3
- label: "{{matrix.image}} / 7.17.4 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.4
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.4
timeout_in_minutes: 300
matrix:
setup:
@@ -897,7 +897,7 @@ steps:
BWC_VERSION: 7.17.4
- label: "{{matrix.image}} / 7.17.5 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.5
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.5
timeout_in_minutes: 300
matrix:
setup:
@@ -913,7 +913,7 @@ steps:
BWC_VERSION: 7.17.5
- label: "{{matrix.image}} / 7.17.6 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.6
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.6
timeout_in_minutes: 300
matrix:
setup:
@@ -929,7 +929,7 @@ steps:
BWC_VERSION: 7.17.6
- label: "{{matrix.image}} / 7.17.7 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.7
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.7
timeout_in_minutes: 300
matrix:
setup:
@@ -945,7 +945,7 @@ steps:
BWC_VERSION: 7.17.7
- label: "{{matrix.image}} / 7.17.8 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.8
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.8
timeout_in_minutes: 300
matrix:
setup:
@@ -961,7 +961,7 @@ steps:
BWC_VERSION: 7.17.8
- label: "{{matrix.image}} / 7.17.9 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.9
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.9
timeout_in_minutes: 300
matrix:
setup:
@@ -977,7 +977,7 @@ steps:
BWC_VERSION: 7.17.9
- label: "{{matrix.image}} / 7.17.10 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.10
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.10
timeout_in_minutes: 300
matrix:
setup:
@@ -993,7 +993,7 @@ steps:
BWC_VERSION: 7.17.10
- label: "{{matrix.image}} / 7.17.11 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.11
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.11
timeout_in_minutes: 300
matrix:
setup:
@@ -1009,7 +1009,7 @@ steps:
BWC_VERSION: 7.17.11
- label: "{{matrix.image}} / 7.17.12 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.12
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.12
timeout_in_minutes: 300
matrix:
setup:
@@ -1025,7 +1025,7 @@ steps:
BWC_VERSION: 7.17.12
- label: "{{matrix.image}} / 7.17.13 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.13
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.13
timeout_in_minutes: 300
matrix:
setup:
@@ -1041,7 +1041,7 @@ steps:
BWC_VERSION: 7.17.13
- label: "{{matrix.image}} / 7.17.14 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.14
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.14
timeout_in_minutes: 300
matrix:
setup:
@@ -1057,7 +1057,7 @@ steps:
BWC_VERSION: 7.17.14
- label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1073,7 +1073,7 @@ steps:
BWC_VERSION: 8.0.0
- label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1089,7 +1089,7 @@ steps:
BWC_VERSION: 8.0.1
- label: "{{matrix.image}} / 8.1.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1105,7 +1105,7 @@ steps:
BWC_VERSION: 8.1.0
- label: "{{matrix.image}} / 8.1.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1121,7 +1121,7 @@ steps:
BWC_VERSION: 8.1.1
- label: "{{matrix.image}} / 8.1.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1137,7 +1137,7 @@ steps:
BWC_VERSION: 8.1.2
- label: "{{matrix.image}} / 8.1.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.1.3
timeout_in_minutes: 300
matrix:
setup:
@@ -1153,7 +1153,7 @@ steps:
BWC_VERSION: 8.1.3
- label: "{{matrix.image}} / 8.2.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1169,7 +1169,7 @@ steps:
BWC_VERSION: 8.2.0
- label: "{{matrix.image}} / 8.2.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1185,7 +1185,7 @@ steps:
BWC_VERSION: 8.2.1
- label: "{{matrix.image}} / 8.2.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1201,7 +1201,7 @@ steps:
BWC_VERSION: 8.2.2
- label: "{{matrix.image}} / 8.2.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.2.3
timeout_in_minutes: 300
matrix:
setup:
@@ -1217,7 +1217,7 @@ steps:
BWC_VERSION: 8.2.3
- label: "{{matrix.image}} / 8.3.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1233,7 +1233,7 @@ steps:
BWC_VERSION: 8.3.0
- label: "{{matrix.image}} / 8.3.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1249,7 +1249,7 @@ steps:
BWC_VERSION: 8.3.1
- label: "{{matrix.image}} / 8.3.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1265,7 +1265,7 @@ steps:
BWC_VERSION: 8.3.2
- label: "{{matrix.image}} / 8.3.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.3.3
timeout_in_minutes: 300
matrix:
setup:
@@ -1281,7 +1281,7 @@ steps:
BWC_VERSION: 8.3.3
- label: "{{matrix.image}} / 8.4.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1297,7 +1297,7 @@ steps:
BWC_VERSION: 8.4.0
- label: "{{matrix.image}} / 8.4.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1313,7 +1313,7 @@ steps:
BWC_VERSION: 8.4.1
- label: "{{matrix.image}} / 8.4.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1329,7 +1329,7 @@ steps:
BWC_VERSION: 8.4.2
- label: "{{matrix.image}} / 8.4.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.4.3
timeout_in_minutes: 300
matrix:
setup:
@@ -1345,7 +1345,7 @@ steps:
BWC_VERSION: 8.4.3
- label: "{{matrix.image}} / 8.5.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1361,7 +1361,7 @@ steps:
BWC_VERSION: 8.5.0
- label: "{{matrix.image}} / 8.5.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1377,7 +1377,7 @@ steps:
BWC_VERSION: 8.5.1
- label: "{{matrix.image}} / 8.5.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1393,7 +1393,7 @@ steps:
BWC_VERSION: 8.5.2
- label: "{{matrix.image}} / 8.5.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.5.3
timeout_in_minutes: 300
matrix:
setup:
@@ -1409,7 +1409,7 @@ steps:
BWC_VERSION: 8.5.3
- label: "{{matrix.image}} / 8.6.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1425,7 +1425,7 @@ steps:
BWC_VERSION: 8.6.0
- label: "{{matrix.image}} / 8.6.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1441,7 +1441,7 @@ steps:
BWC_VERSION: 8.6.1
- label: "{{matrix.image}} / 8.6.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.6.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1457,7 +1457,7 @@ steps:
BWC_VERSION: 8.6.2
- label: "{{matrix.image}} / 8.7.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1473,7 +1473,7 @@ steps:
BWC_VERSION: 8.7.0
- label: "{{matrix.image}} / 8.7.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.7.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1489,7 +1489,7 @@ steps:
BWC_VERSION: 8.7.1
- label: "{{matrix.image}} / 8.8.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1505,7 +1505,7 @@ steps:
BWC_VERSION: 8.8.0
- label: "{{matrix.image}} / 8.8.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1521,7 +1521,7 @@ steps:
BWC_VERSION: 8.8.1
- label: "{{matrix.image}} / 8.8.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.8.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1537,7 +1537,7 @@ steps:
BWC_VERSION: 8.8.2
- label: "{{matrix.image}} / 8.9.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1553,7 +1553,7 @@ steps:
BWC_VERSION: 8.9.0
- label: "{{matrix.image}} / 8.9.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1569,7 +1569,7 @@ steps:
BWC_VERSION: 8.9.1
- label: "{{matrix.image}} / 8.9.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.9.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1585,7 +1585,7 @@ steps:
BWC_VERSION: 8.9.2
- label: "{{matrix.image}} / 8.10.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.0
timeout_in_minutes: 300
matrix:
setup:
@@ -1601,7 +1601,7 @@ steps:
BWC_VERSION: 8.10.0
- label: "{{matrix.image}} / 8.10.1 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.1
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.1
timeout_in_minutes: 300
matrix:
setup:
@@ -1617,7 +1617,7 @@ steps:
BWC_VERSION: 8.10.1
- label: "{{matrix.image}} / 8.10.2 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.2
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.2
timeout_in_minutes: 300
matrix:
setup:
@@ -1633,7 +1633,7 @@ steps:
BWC_VERSION: 8.10.2
- label: "{{matrix.image}} / 8.10.3 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.3
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.10.3
timeout_in_minutes: 300
matrix:
setup:
@@ -1649,7 +1649,7 @@ steps:
BWC_VERSION: 8.10.3
- label: "{{matrix.image}} / 8.11.0 / packaging-tests-upgrade"
- command: ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.0
+ command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.0
timeout_in_minutes: 300
matrix:
setup:
diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml
index 8522ead742768..08c9fda4c9a6b 100644
--- a/.buildkite/pipelines/periodic-platform-support.yml
+++ b/.buildkite/pipelines/periodic-platform-support.yml
@@ -26,8 +26,9 @@ steps:
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
- diskSizeGb: 350
- machineType: n1-standard-32
+ localSsds: 1
+ localSsdInterface: nvme
+ machineType: custom-32-98304
env: {}
- group: platform-support-windows
steps:
diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml
index e4f844afc3f41..ec3ae76ffcdfb 100644
--- a/.buildkite/pipelines/periodic.template.yml
+++ b/.buildkite/pipelines/periodic.template.yml
@@ -103,6 +103,73 @@ steps:
image: family/elasticsearch-ubuntu-2004
diskSizeGb: 350
machineType: custom-32-98304
+ - group: third-party tests
+ steps:
+ - label: third-party / azure-sas
+ command: |
+ export azure_storage_container=elasticsearch-ci-thirdparty-sas
+ export azure_storage_base_path=$BUILDKITE_BRANCH
+
+ .ci/scripts/run-gradle.sh azureThirdPartyTest
+ env:
+ USE_3RD_PARTY_AZURE_SAS_CREDENTIALS: "true"
+ timeout_in_minutes: 30
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: n2-standard-8
+ buildDirectory: /dev/shm/bk
+ - label: third-party / azure
+ command: |
+ export azure_storage_container=elasticsearch-ci-thirdparty
+ export azure_storage_base_path=$BUILDKITE_BRANCH
+
+ .ci/scripts/run-gradle.sh azureThirdPartyTest
+ env:
+ USE_3RD_PARTY_AZURE_CREDENTIALS: "true"
+ timeout_in_minutes: 30
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: n2-standard-8
+ buildDirectory: /dev/shm/bk
+ - label: third-party / gcs
+ command: |
+ export google_storage_bucket=elasticsearch-ci-thirdparty
+ export google_storage_base_path=$BUILDKITE_BRANCH
+
+ .ci/scripts/run-gradle.sh gcsThirdPartyTest
+ env:
+ USE_3RD_PARTY_GCS_CREDENTIALS: "true"
+ timeout_in_minutes: 30
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: n2-standard-8
+ buildDirectory: /dev/shm/bk
+ - label: third-party / geoip
+ command: |
+ .ci/scripts/run-gradle.sh :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true"
+ timeout_in_minutes: 30
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: n2-standard-8
+ buildDirectory: /dev/shm/bk
+ - label: third-party / s3
+ command: |
+ export amazon_s3_bucket=elasticsearch-ci.us-west-2
+ export amazon_s3_base_path=$BUILDKITE_BRANCH
+
+ .ci/scripts/run-gradle.sh s3ThirdPartyTest
+ env:
+ USE_3RD_PARTY_S3_CREDENTIALS: "true"
+ timeout_in_minutes: 30
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: n2-standard-8
+ buildDirectory: /dev/shm/bk
- label: Upload Snyk Dependency Graph
command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH
env:
diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml
index 967dcbb8cf535..f0a3cfdfd1f50 100644
--- a/.buildkite/pipelines/periodic.yml
+++ b/.buildkite/pipelines/periodic.yml
@@ -1124,6 +1124,73 @@ steps:
       image: family/elasticsearch-ubuntu-2004
       diskSizeGb: 350
       machineType: custom-32-98304
+  - group: third-party tests
+    steps:
+      - label: third-party / azure-sas
+        command: |
+          export azure_storage_container=elasticsearch-ci-thirdparty-sas
+          export azure_storage_base_path=$BUILDKITE_BRANCH
+
+          .ci/scripts/run-gradle.sh azureThirdPartyTest
+        env:
+          USE_3RD_PARTY_AZURE_SAS_CREDENTIALS: "true"
+        timeout_in_minutes: 30
+        agents:
+          provider: gcp
+          image: family/elasticsearch-ubuntu-2004
+          machineType: n2-standard-8
+          buildDirectory: /dev/shm/bk
+      - label: third-party / azure
+        command: |
+          export azure_storage_container=elasticsearch-ci-thirdparty
+          export azure_storage_base_path=$BUILDKITE_BRANCH
+
+          .ci/scripts/run-gradle.sh azureThirdPartyTest
+        env:
+          USE_3RD_PARTY_AZURE_CREDENTIALS: "true"
+        timeout_in_minutes: 30
+        agents:
+          provider: gcp
+          image: family/elasticsearch-ubuntu-2004
+          machineType: n2-standard-8
+          buildDirectory: /dev/shm/bk
+      - label: third-party / gcs
+        command: |
+          export google_storage_bucket=elasticsearch-ci-thirdparty
+          export google_storage_base_path=$BUILDKITE_BRANCH
+
+          .ci/scripts/run-gradle.sh gcsThirdPartyTest
+        env:
+          USE_3RD_PARTY_GCS_CREDENTIALS: "true"
+        timeout_in_minutes: 30
+        agents:
+          provider: gcp
+          image: family/elasticsearch-ubuntu-2004
+          machineType: n2-standard-8
+          buildDirectory: /dev/shm/bk
+      - label: third-party / geoip
+        command: |
+          .ci/scripts/run-gradle.sh :modules:ingest-geoip:internalClusterTest -Dtests.jvm.argline="-Dgeoip_use_service=true"
+        timeout_in_minutes: 30
+        agents:
+          provider: gcp
+          image: family/elasticsearch-ubuntu-2004
+          machineType: n2-standard-8
+          buildDirectory: /dev/shm/bk
+ - label: third-party / s3
+ command: |
+ export amazon_s3_bucket=elasticsearch-ci.us-west-2
+ export amazon_s3_base_path=$BUILDKITE_BRANCH
+
+ .ci/scripts/run-gradle.sh s3ThirdPartyTest
+ env:
+ USE_3RD_PARTY_S3_CREDENTIALS: "true"
+ timeout_in_minutes: 30
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: n2-standard-8
+ buildDirectory: /dev/shm/bk
- label: Upload Snyk Dependency Graph
command: .ci/scripts/run-gradle.sh uploadSnykDependencyGraph -PsnykTargetReference=$BUILDKITE_BRANCH
env:
diff --git a/.buildkite/pipelines/pull-request/.defaults.yml b/.buildkite/pipelines/pull-request/.defaults.yml
new file mode 100644
index 0000000000000..84d73cbd738a2
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/.defaults.yml
@@ -0,0 +1,6 @@
+config:
+ skip-labels: ">test-mute"
+ excluded-regions:
+ - ^docs/.*
+ - ^x-pack/docs/.*
+# Note that there is also a trigger-phrase default inside pull-request.ts (it's dynamic based on the name of each pipeline file)
diff --git a/.buildkite/pipelines/pull-request/build-benchmark.yml b/.buildkite/pipelines/pull-request/build-benchmark.yml
new file mode 100644
index 0000000000000..8d3215b8393ce
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/build-benchmark.yml
@@ -0,0 +1,24 @@
+config:
+ allow-labels: build-benchmark
+ trigger-phrase: '.*run\W+elasticsearch-ci/build-bench.*'
+steps:
+ - label: build-benchmark / {{matrix.part}}
+ key: "build-benchmark"
+ command: |
+ .ci/scripts/run-gradle.sh :build-tools-internal:bootstrapPerformanceTests
+ .ci/scripts/install-gradle-profiler.sh
+ .ci/scripts/run-gradle-profiler.sh --benchmark --scenario-file build-tools-internal/build/performanceTests/elasticsearch-build-benchmark-{{matrix.part}}.scenarios --project-dir . --output-dir profile-out
+ mkdir build
+ tar -czf build/$BUILDKITE_BUILD_NUMBER.tar.bz2 profile-out
+ matrix:
+ setup:
+ part:
+ - part1
+ - part2
+ env:
+ BUILD_PERFORMANCE_TEST: "true"
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/bwc-snapshots-windows.yml b/.buildkite/pipelines/pull-request/bwc-snapshots-windows.yml
new file mode 100644
index 0000000000000..d37bdf380f926
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/bwc-snapshots-windows.yml
@@ -0,0 +1,20 @@
+config:
+ allow-labels: test-windows
+steps:
+ - group: bwc-snapshots-windows
+ steps:
+ - label: "{{matrix.BWC_VERSION}} / bwc-snapshots-windows"
+ key: "bwc-snapshots-windows"
+ command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh
+ env:
+ GRADLE_TASK: "v{{matrix.BWC_VERSION}}#bwcTest"
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
+ agents:
+ provider: gcp
+ image: family/elasticsearch-windows-2022
+ machineType: custom-32-98304
+ diskType: pd-ssd
+ diskSizeGb: 350
diff --git a/.buildkite/pipelines/pull-request/bwc-snapshots.yml b/.buildkite/pipelines/pull-request/bwc-snapshots.yml
new file mode 100644
index 0000000000000..21873475056ea
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/bwc-snapshots.yml
@@ -0,0 +1,20 @@
+config:
+ trigger-phrase: '.*run\W+elasticsearch-ci/bwc.*'
+ skip-labels:
+ - ">test-mute"
+ - "test-full-bwc"
+steps:
+ - group: bwc-snapshots
+ steps:
+ - label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
+ key: "bwc-snapshots"
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/cloud-deploy.yml b/.buildkite/pipelines/pull-request/cloud-deploy.yml
new file mode 100644
index 0000000000000..ce8e8206d51ff
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/cloud-deploy.yml
@@ -0,0 +1,13 @@
+config:
+ allow-labels: cloud-deploy
+steps:
+ - label: cloud-deploy
+ command: .buildkite/scripts/cloud-deploy.sh
+ env:
+ USE_PROD_DOCKER_CREDENTIALS: "true"
+ timeout_in_minutes: 20
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/docs-check.yml b/.buildkite/pipelines/pull-request/docs-check.yml
new file mode 100644
index 0000000000000..2201eb2d1e4ea
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/docs-check.yml
@@ -0,0 +1,14 @@
+config:
+ included-regions:
+ - ^docs/.*
+ - ^x-pack/docs/.*
+ excluded-regions: []
+steps:
+ - label: docs-check
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed precommit :docs:check
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/eql-correctness.yml b/.buildkite/pipelines/pull-request/eql-correctness.yml
new file mode 100644
index 0000000000000..8f7ca6942c0e9
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/eql-correctness.yml
@@ -0,0 +1,9 @@
+steps:
+ - label: eql-correctness
+ command: .buildkite/scripts/eql-correctness.sh
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/example-plugins.yml b/.buildkite/pipelines/pull-request/example-plugins.yml
new file mode 100644
index 0000000000000..18d0de6594980
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/example-plugins.yml
@@ -0,0 +1,18 @@
+config:
+ included-regions:
+ - build-conventions/.*
+ - build-tools/.*
+ - build-tools-internal/.*
+ - plugins/examples/.*
+steps:
+ - label: example-plugins
+ command: |-
+ cd $$WORKSPACE/plugins/examples
+
+ $$WORKSPACE/.ci/scripts/run-gradle.sh -Dorg.gradle.jvmargs=-Xmx8g build --include-build $$WORKSPACE
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/full-bwc.yml b/.buildkite/pipelines/pull-request/full-bwc.yml
new file mode 100644
index 0000000000000..d3fa8eccaf7d9
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/full-bwc.yml
@@ -0,0 +1,15 @@
+config:
+ allow-labels: test-full-bwc
+steps:
+ - group: bwc
+ steps:
+ - label: $BWC_VERSION / bwc
+ key: "full-bwc:$BWC_VERSION_SNAKE"
+ bwc_template: true
+ command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$BWC_VERSION#bwcTest
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml
new file mode 100644
index 0000000000000..98bc61ea33738
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml
@@ -0,0 +1,27 @@
+config:
+ skip-labels:
+ - ">test-mute"
+ - ":Delivery/Packaging"
+steps:
+ - group: packaging-tests-unix-sample
+ steps:
+ - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix-sample"
+ key: "packaging-tests-unix-sample"
+ command: ./.ci/scripts/packaging-test.sh $$PACKAGING_TASK
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ image:
+ - rhel-8
+ - ubuntu-2004
+ PACKAGING_TASK:
+ - destructiveDistroTest.docker
+ - destructiveDistroTest.packages
+ - destructiveDistroTest.archives
+ agents:
+ provider: gcp
+ image: family/elasticsearch-{{matrix.image}}
+ diskSizeGb: 350
+ machineType: custom-16-32768
+ env:
+ PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}"
diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
new file mode 100644
index 0000000000000..ec2e29b284781
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml
@@ -0,0 +1,39 @@
+config:
+ allow-labels: ":Delivery/Packaging"
+steps:
+ - group: packaging-tests-unix
+ steps:
+ - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix"
+ key: "packaging-tests-unix"
+ command: ./.ci/scripts/packaging-test.sh $$PACKAGING_TASK
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ image:
+ - centos-7
+ - debian-10
+ - debian-11
+ - opensuse-leap-15
+ - oraclelinux-7
+ - oraclelinux-8
+ - sles-12
+ - sles-15
+ - ubuntu-1804
+ - ubuntu-2004
+ - ubuntu-2204
+ - rocky-8
+ - rhel-7
+ - rhel-8
+ - rhel-9
+ - almalinux-8
+ PACKAGING_TASK:
+ - destructiveDistroTest.docker
+ - destructiveDistroTest.packages
+ - destructiveDistroTest.archives
+ agents:
+ provider: gcp
+ image: family/elasticsearch-{{matrix.image}}
+ diskSizeGb: 350
+ machineType: custom-16-32768
+ env:
+ PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}"
diff --git a/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml b/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml
new file mode 100644
index 0000000000000..bcf38f51f4a66
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/packaging-tests-windows-sample.yml
@@ -0,0 +1,25 @@
+config:
+ skip-labels:
+ - ">test-mute"
+ - ":Delivery/Packaging"
+steps:
+ - group: packaging-tests-windows-sample
+ steps:
+ - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-windows-sample"
+ key: "packaging-tests-windows-sample"
+ command: .\.buildkite\scripts\run-script.ps1 .\.ci\scripts\packaging-test.ps1 -GradleTasks destructiveDistroTest.{{matrix.PACKAGING_TASK}}
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ image:
+ - windows-2019
+ PACKAGING_TASK:
+ - default-windows-archive
+ agents:
+ provider: gcp
+ image: family/elasticsearch-{{matrix.image}}
+ machineType: custom-32-98304
+ diskType: pd-ssd
+ diskSizeGb: 350
+ env:
+ PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}"
diff --git a/.buildkite/pipelines/pull-request/packaging-tests-windows.yml b/.buildkite/pipelines/pull-request/packaging-tests-windows.yml
new file mode 100644
index 0000000000000..651a82982460f
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/packaging-tests-windows.yml
@@ -0,0 +1,25 @@
+config:
+ allow-labels: ":Delivery/Packaging"
+steps:
+ - group: packaging-tests-windows
+ steps:
+ - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-windows"
+ key: "packaging-tests-windows"
+ command: .\.buildkite\scripts\run-script.ps1 .\.ci\scripts\packaging-test.ps1 -GradleTasks destructiveDistroTest.{{matrix.PACKAGING_TASK}}
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ image:
+ - windows-2016
+ - windows-2019
+ - windows-2022
+ PACKAGING_TASK:
+ - default-windows-archive
+ agents:
+ provider: gcp
+ image: family/elasticsearch-{{matrix.image}}
+ machineType: custom-32-98304
+ diskType: pd-ssd
+ diskSizeGb: 350
+ env:
+ PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}"
diff --git a/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml b/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml
new file mode 100644
index 0000000000000..c62cf23310422
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/packaging-upgrade-tests.yml
@@ -0,0 +1,22 @@
+config:
+ allow-labels: ":Delivery/Packaging"
+steps:
+ - group: packaging-tests-upgrade
+ steps:
+ - label: "{{matrix.image}} / $BWC_VERSION / packaging-tests-upgrade"
+ key: "packaging-tests-upgrade:$BWC_VERSION_SNAKE"
+ command: ./.ci/scripts/packaging-test.sh destructiveDistroUpgradeTest.v$BWC_VERSION
+ timeout_in_minutes: 300
+ bwc_template: true
+ matrix:
+ setup:
+ image:
+ - rocky-8
+ - ubuntu-2004
+ agents:
+ provider: gcp
+ image: family/elasticsearch-{{matrix.image}}
+ machineType: custom-16-32768
+ buildDirectory: /dev/shm/bk
+ env:
+ BWC_VERSION: $BWC_VERSION
diff --git a/.buildkite/pipelines/pull-request/part-1-fips.yml b/.buildkite/pipelines/pull-request/part-1-fips.yml
new file mode 100644
index 0000000000000..42f930c1bde9a
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-1-fips.yml
@@ -0,0 +1,11 @@
+config:
+ allow-labels: "Team:Security"
+steps:
+ - label: part-1-fips
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart1
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-1-windows.yml b/.buildkite/pipelines/pull-request/part-1-windows.yml
new file mode 100644
index 0000000000000..20d46ebaa7406
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-1-windows.yml
@@ -0,0 +1,14 @@
+config:
+ allow-labels: "test-windows"
+steps:
+ - label: part-1-windows
+ command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-windows-2022
+ machineType: custom-32-98304
+ diskType: pd-ssd
+ diskSizeGb: 350
+ env:
+ GRADLE_TASK: checkPart1
diff --git a/.buildkite/pipelines/pull-request/part-1.yml b/.buildkite/pipelines/pull-request/part-1.yml
new file mode 100644
index 0000000000000..3d467c6c41e43
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-1.yml
@@ -0,0 +1,9 @@
+steps:
+ - label: part-1
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart1
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-2-fips.yml b/.buildkite/pipelines/pull-request/part-2-fips.yml
new file mode 100644
index 0000000000000..6a3647ceb50ae
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-2-fips.yml
@@ -0,0 +1,11 @@
+config:
+ allow-labels: "Team:Security"
+steps:
+ - label: part-2-fips
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart2
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-2-windows.yml b/.buildkite/pipelines/pull-request/part-2-windows.yml
new file mode 100644
index 0000000000000..f38df244e8389
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-2-windows.yml
@@ -0,0 +1,14 @@
+config:
+ allow-labels: "test-windows"
+steps:
+ - label: part-2-windows
+ command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-windows-2022
+ machineType: custom-32-98304
+ diskType: pd-ssd
+ diskSizeGb: 350
+ env:
+ GRADLE_TASK: checkPart2
diff --git a/.buildkite/pipelines/pull-request/part-2.yml b/.buildkite/pipelines/pull-request/part-2.yml
new file mode 100644
index 0000000000000..43de69bbcd945
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-2.yml
@@ -0,0 +1,9 @@
+steps:
+ - label: part-2
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart2
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-3-fips.yml b/.buildkite/pipelines/pull-request/part-3-fips.yml
new file mode 100644
index 0000000000000..cee3ea153acb9
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-3-fips.yml
@@ -0,0 +1,11 @@
+config:
+ allow-labels: "Team:Security"
+steps:
+ - label: part-3-fips
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed -Dtests.fips.enabled=true checkPart3
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/part-3-windows.yml b/.buildkite/pipelines/pull-request/part-3-windows.yml
new file mode 100644
index 0000000000000..3bad740aedb72
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-3-windows.yml
@@ -0,0 +1,14 @@
+config:
+ allow-labels: "test-windows"
+steps:
+ - label: part-3-windows
+ command: .\.buildkite\scripts\run-script.ps1 bash .buildkite/scripts/windows-run-gradle.sh
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-windows-2022
+ machineType: custom-32-98304
+ diskType: pd-ssd
+ diskSizeGb: 350
+ env:
+ GRADLE_TASK: checkPart3
diff --git a/.buildkite/pipelines/pull-request/part-3.yml b/.buildkite/pipelines/pull-request/part-3.yml
new file mode 100644
index 0000000000000..12abae7634822
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/part-3.yml
@@ -0,0 +1,11 @@
+config:
+ skip-target-branches: "7.17"
+steps:
+ - label: part-3
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkPart3
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/precommit.yml b/.buildkite/pipelines/pull-request/precommit.yml
new file mode 100644
index 0000000000000..f6548dfeed9b2
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/precommit.yml
@@ -0,0 +1,12 @@
+config:
+ allow-labels: ">test-mute"
+ skip-labels: []
+steps:
+ - label: precommit
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed precommit
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/pipelines/pull-request/release-tests.yml b/.buildkite/pipelines/pull-request/release-tests.yml
new file mode 100644
index 0000000000000..7d7a5c77d3320
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/release-tests.yml
@@ -0,0 +1,11 @@
+config:
+ allow-labels: test-release
+steps:
+ - label: release-tests
+ command: .buildkite/scripts/release-tests.sh
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ diskSizeGb: 350
+ machineType: custom-32-98304
diff --git a/.buildkite/pipelines/pull-request/rest-compatibility.yml b/.buildkite/pipelines/pull-request/rest-compatibility.yml
new file mode 100644
index 0000000000000..a69810e23d960
--- /dev/null
+++ b/.buildkite/pipelines/pull-request/rest-compatibility.yml
@@ -0,0 +1,11 @@
+config:
+ skip-target-branches: "7.17"
+steps:
+ - label: rest-compatibility
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed checkRestCompat
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/scripts/cloud-deploy.sh b/.buildkite/scripts/cloud-deploy.sh
new file mode 100755
index 0000000000000..2b98aa224406b
--- /dev/null
+++ b/.buildkite/scripts/cloud-deploy.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -euo pipefail
+
+.ci/scripts/run-gradle.sh buildCloudDockerImage
+
+ES_VERSION=$(grep 'elasticsearch' build-tools-internal/version.properties | awk '{print $3}')
+DOCKER_TAG="docker.elastic.co/elasticsearch-ci/elasticsearch-cloud:${ES_VERSION}-${BUILDKITE_COMMIT:0:7}"
+docker tag elasticsearch-cloud:test "$DOCKER_TAG"
+
+echo "$DOCKER_REGISTRY_PASSWORD" | docker login -u "$DOCKER_REGISTRY_USERNAME" --password-stdin docker.elastic.co
+unset DOCKER_REGISTRY_USERNAME DOCKER_REGISTRY_PASSWORD
+
+docker push "$DOCKER_TAG"
diff --git a/.buildkite/scripts/get-legacy-secret.sh b/.buildkite/scripts/get-legacy-secret.sh
new file mode 100755
index 0000000000000..3df6c27f484f2
--- /dev/null
+++ b/.buildkite/scripts/get-legacy-secret.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# WARNING: this script will echo the credentials to the console. It is meant to be called from another script and captured in a variable.
+# It should really only be used inside .buildkite/hooks/pre-command
+
+source .buildkite/scripts/setup-legacy-vault.sh
+
+vault read -format=json "$1"
diff --git a/.buildkite/scripts/pull-request/README.md b/.buildkite/scripts/pull-request/README.md
new file mode 100644
index 0000000000000..5fc1d564dc74f
--- /dev/null
+++ b/.buildkite/scripts/pull-request/README.md
@@ -0,0 +1,38 @@
+# Pull Request pipeline generator
+
+## Overview
+
+Each time a pull request build is triggered, such as via commit or comment, we use this generator to dynamically create the steps that need to run.
+
+The generator handles the following:
+
+ - `allow-labels` - only trigger a step if the PR has one of these labels
+ - `skip-labels` - don't trigger the step if the PR has one of these labels
+ - `excluded-regions` - don't trigger the step if **all** of the changes in the PR match these paths/regexes
+ - `included-regions` - trigger the step if **all** of the changes in the PR match these paths/regexes
+ - `trigger-phrase` - trigger this step, and ignore all other steps, if the build was triggered by a comment and that comment matches this regex
+  - Note that each pipeline also gets an automatic trigger phrase of `.*run\\W+elasticsearch-ci/<pipeline-name>.*` if none is specified
+ - Replacing `$SNAPSHOT_BWC_VERSIONS` in pipelines with an array of versions from `.ci/snapshotBwcVersions`
+ - Duplicating any step with `bwc_template: true` for each BWC version in `.ci/bwcVersions`
+
+[Bun](https://bun.sh/) is used to test and run the TypeScript. It's an alternative JavaScript runtime that natively handles TypeScript.
+
+### Pipelines Location
+
+Pipelines are in [`.buildkite/pipelines/pull-request`](../../pipelines/pull-request). They are automatically picked up and given a name based on their filename.
+
+
+## Setup
+
+- [Install bun](https://bun.sh/)
+ - `npm install -g bun` will work if you already have `npm`
+- `cd .buildkite; bun install` to install dependencies
+
+## Run tests
+
+```bash
+cd .buildkite
+bun test
+```
+
+If you need to regenerate the snapshots, run `bun test --update-snapshots`.
diff --git a/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap b/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap
new file mode 100644
index 0000000000000..39cd3fe07beb4
--- /dev/null
+++ b/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap
@@ -0,0 +1,185 @@
+// Bun Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`generatePipelines should generate correct pipelines with a non-docs change 1`] = `
+[
+ {
+ "steps": [
+ {
+ "group": "bwc-snapshots",
+ "steps": [
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest",
+ "env": {
+ "BWC_VERSION": "{{matrix.BWC_VERSION}}",
+ },
+ "label": "{{matrix.BWC_VERSION}} / bwc-snapshots",
+ "matrix": {
+ "setup": {
+ "BWC_VERSION": [
+ "7.17.14",
+ "8.10.3",
+ "8.11.0",
+ ],
+ },
+ },
+ "timeout_in_minutes": 300,
+ },
+ ],
+ },
+ ],
+ },
+ {
+ "env": {
+ "CUSTOM_ENV_VAR": "value",
+ },
+ "steps": [
+ {
+ "command": "echo 'hello world'",
+ "label": "test-step",
+ },
+ ],
+ },
+]
+`;
+
+exports[`generatePipelines should generate correct pipelines with only docs changes 1`] = `
+[
+ {
+ "steps": [
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dignore.tests.seed precommit :docs:check",
+ "label": "docs-check",
+ "timeout_in_minutes": 300,
+ },
+ ],
+ },
+]
+`;
+
+exports[`generatePipelines should generate correct pipelines with full BWC expansion 1`] = `
+[
+ {
+ "steps": [
+ {
+ "group": "bwc",
+ "steps": [
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.0.0#bwcTest",
+ "env": {
+ "BWC_VERSION": "7.0.0",
+ },
+ "key": "full-bwc:7_0_0",
+ "label": "7.0.0 / bwc",
+ "timeout_in_minutes": 300,
+ },
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.0.1#bwcTest",
+ "env": {
+ "BWC_VERSION": "7.0.1",
+ },
+ "key": "full-bwc:7_0_1",
+ "label": "7.0.1 / bwc",
+ "timeout_in_minutes": 300,
+ },
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.1.0#bwcTest",
+ "env": {
+ "BWC_VERSION": "7.1.0",
+ },
+ "key": "full-bwc:7_1_0",
+ "label": "7.1.0 / bwc",
+ "timeout_in_minutes": 300,
+ },
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.10.0#bwcTest",
+ "env": {
+ "BWC_VERSION": "8.10.0",
+ },
+ "key": "full-bwc:8_10_0",
+ "label": "8.10.0 / bwc",
+ "timeout_in_minutes": 300,
+ },
+ {
+ "agents": {
+ "buildDirectory": "/dev/shm/bk",
+ "image": "family/elasticsearch-ubuntu-2004",
+ "machineType": "custom-32-98304",
+ "provider": "gcp",
+ },
+ "command": ".ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.11.0#bwcTest",
+ "env": {
+ "BWC_VERSION": "8.11.0",
+ },
+ "key": "full-bwc:8_11_0",
+ "label": "8.11.0 / bwc",
+ "timeout_in_minutes": 300,
+ },
+ ],
+ },
+ ],
+ },
+ {
+ "env": {
+ "CUSTOM_ENV_VAR": "value",
+ },
+ "steps": [
+ {
+ "command": "echo 'hello world'",
+ "label": "test-step",
+ },
+ ],
+ },
+]
+`;
+
+exports[`generatePipelines should generate correct pipeline when using a trigger comment for it 1`] = `
+[
+ {
+ "env": {
+ "CUSTOM_ENV_VAR": "value",
+ },
+ "steps": [
+ {
+ "command": "echo 'hello world'",
+ "label": "test-step",
+ },
+ ],
+ },
+]
+`;
diff --git a/.buildkite/scripts/pull-request/bwc-versions.ts b/.buildkite/scripts/pull-request/bwc-versions.ts
new file mode 100644
index 0000000000000..adbe92fad76f4
--- /dev/null
+++ b/.buildkite/scripts/pull-request/bwc-versions.ts
@@ -0,0 +1,30 @@
+import { parse } from "yaml";
+import { readFileSync } from "fs";
+import { resolve } from "path";
+
+const PROJECT_ROOT = resolve(`${import.meta.dir}/../../..`);
+
+let BWC_VERSIONS_PATH = `${PROJECT_ROOT}/.ci/bwcVersions`;
+let BWC_VERSIONS: any;
+
+let SNAPSHOT_BWC_VERSIONS_PATH = `${PROJECT_ROOT}/.ci/snapshotBwcVersions`;
+let SNAPSHOT_BWC_VERSIONS: any;
+
+export const getSnapshotBwcVersions = () => {
+ SNAPSHOT_BWC_VERSIONS = SNAPSHOT_BWC_VERSIONS ?? parse(readFileSync(SNAPSHOT_BWC_VERSIONS_PATH, "utf-8")).BWC_VERSION;
+
+ return SNAPSHOT_BWC_VERSIONS;
+};
+
+export const getBwcVersions = () => {
+ BWC_VERSIONS = BWC_VERSIONS ?? parse(readFileSync(BWC_VERSIONS_PATH, "utf-8")).BWC_VERSION;
+ return BWC_VERSIONS;
+};
+
+export const setSnapshotBwcVersionsPath = (path: string) => {
+ SNAPSHOT_BWC_VERSIONS_PATH = path;
+};
+
+export const setBwcVersionsPath = (path: string) => {
+ BWC_VERSIONS_PATH = path;
+};
diff --git a/.buildkite/scripts/pull-request/mocks/bwcVersions b/.buildkite/scripts/pull-request/mocks/bwcVersions
new file mode 100644
index 0000000000000..0f4382943d70b
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/bwcVersions
@@ -0,0 +1,6 @@
+BWC_VERSION:
+ - "7.0.0"
+ - "7.0.1"
+ - "7.1.0"
+ - "8.10.0"
+ - "8.11.0"
diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/.defaults.yml b/.buildkite/scripts/pull-request/mocks/pipelines/.defaults.yml
new file mode 100644
index 0000000000000..b5341c16a7e97
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/pipelines/.defaults.yml
@@ -0,0 +1,5 @@
+config:
+ skip-labels: ">test-mute"
+ excluded-regions:
+ - ^docs/.*
+ - ^x-pack/docs/.*
diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/bwc-snapshots.yml b/.buildkite/scripts/pull-request/mocks/pipelines/bwc-snapshots.yml
new file mode 100644
index 0000000000000..0f549ed9f1195
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/pipelines/bwc-snapshots.yml
@@ -0,0 +1,21 @@
+config:
+ trigger-phrase: '.*run\W+elasticsearch-ci/bwc.*'
+ skip-labels:
+ - ">test-mute"
+ - "test-full-bwc"
+steps:
+ - group: bwc-snapshots
+ steps:
+ - label: "{{matrix.BWC_VERSION}} / bwc-snapshots"
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest
+ timeout_in_minutes: 300
+ matrix:
+ setup:
+ BWC_VERSION: $SNAPSHOT_BWC_VERSIONS
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
+ env:
+ BWC_VERSION: "{{matrix.BWC_VERSION}}"
diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/docs-check.yml b/.buildkite/scripts/pull-request/mocks/pipelines/docs-check.yml
new file mode 100644
index 0000000000000..2201eb2d1e4ea
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/pipelines/docs-check.yml
@@ -0,0 +1,14 @@
+config:
+ included-regions:
+ - ^docs/.*
+ - ^x-pack/docs/.*
+ excluded-regions: []
+steps:
+ - label: docs-check
+ command: .ci/scripts/run-gradle.sh -Dignore.tests.seed precommit :docs:check
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/full-bwc.yml b/.buildkite/scripts/pull-request/mocks/pipelines/full-bwc.yml
new file mode 100644
index 0000000000000..2737597815ad0
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/pipelines/full-bwc.yml
@@ -0,0 +1,17 @@
+config:
+ allow-labels: test-full-bwc
+steps:
+ - group: bwc
+ steps:
+ - label: $BWC_VERSION / bwc
+ key: full-bwc:$BWC_VERSION_SNAKE
+ bwc_template: true
+ command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v$BWC_VERSION#bwcTest
+ timeout_in_minutes: 300
+ agents:
+ provider: gcp
+ image: family/elasticsearch-ubuntu-2004
+ machineType: custom-32-98304
+ buildDirectory: /dev/shm/bk
+ env:
+ BWC_VERSION: $BWC_VERSION
diff --git a/.buildkite/scripts/pull-request/mocks/pipelines/using-defaults.yml b/.buildkite/scripts/pull-request/mocks/pipelines/using-defaults.yml
new file mode 100644
index 0000000000000..a3b2010547a34
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/pipelines/using-defaults.yml
@@ -0,0 +1,5 @@
+env:
+ CUSTOM_ENV_VAR: "value"
+steps:
+ - label: test-step
+ command: echo 'hello world'
diff --git a/.buildkite/scripts/pull-request/mocks/snapshotBwcVersions b/.buildkite/scripts/pull-request/mocks/snapshotBwcVersions
new file mode 100644
index 0000000000000..1bc1fa321d9da
--- /dev/null
+++ b/.buildkite/scripts/pull-request/mocks/snapshotBwcVersions
@@ -0,0 +1,4 @@
+BWC_VERSION:
+ - "7.17.14"
+ - "8.10.3"
+ - "8.11.0"
diff --git a/.buildkite/scripts/pull-request/pipeline.generate.ts b/.buildkite/scripts/pull-request/pipeline.generate.ts
new file mode 100644
index 0000000000000..69caff990dcfe
--- /dev/null
+++ b/.buildkite/scripts/pull-request/pipeline.generate.ts
@@ -0,0 +1,19 @@
+import { stringify } from "yaml";
+import { execSync } from "child_process";
+
+import { generatePipelines } from "./pipeline";
+
+const pipelines = generatePipelines();
+
+for (const pipeline of pipelines) {
+ if (!process.env.CI) {
+ // Just for local debugging purposes
+ console.log("");
+ console.log(stringify(pipeline));
+ } else {
+ execSync(`buildkite-agent pipeline upload`, {
+ input: stringify(pipeline),
+ stdio: ["pipe", "inherit", "inherit"],
+ });
+ }
+}
diff --git a/.buildkite/scripts/pull-request/pipeline.sh b/.buildkite/scripts/pull-request/pipeline.sh
new file mode 100755
index 0000000000000..77bbc1e115430
--- /dev/null
+++ b/.buildkite/scripts/pull-request/pipeline.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -euo pipefail
+
+npm install -g bun
+bun .buildkite/scripts/pull-request/pipeline.generate.ts
diff --git a/.buildkite/scripts/pull-request/pipeline.test.ts b/.buildkite/scripts/pull-request/pipeline.test.ts
new file mode 100644
index 0000000000000..e13b1e1f73278
--- /dev/null
+++ b/.buildkite/scripts/pull-request/pipeline.test.ts
@@ -0,0 +1,38 @@
+import { beforeEach, describe, expect, test } from "bun:test";
+
+import { generatePipelines } from "./pipeline";
+import { setBwcVersionsPath, setSnapshotBwcVersionsPath } from "./bwc-versions";
+
+describe("generatePipelines", () => {
+ beforeEach(() => {
+ setBwcVersionsPath(`${import.meta.dir}/mocks/bwcVersions`);
+ setSnapshotBwcVersionsPath(`${import.meta.dir}/mocks/snapshotBwcVersions`);
+
+ process.env["GITHUB_PR_LABELS"] = "test-label-1,test-label-2";
+ process.env["GITHUB_PR_TRIGGER_COMMENT"] = "";
+ });
+
+ test("should generate correct pipelines with a non-docs change", () => {
+ const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle", "docs/README.asciidoc"]);
+ expect(pipelines).toMatchSnapshot();
+ });
+
+ test("should generate correct pipelines with only docs changes", () => {
+ const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["docs/README.asciidoc"]);
+ expect(pipelines).toMatchSnapshot();
+ });
+
+ test("should generate correct pipelines with full BWC expansion", () => {
+ process.env["GITHUB_PR_LABELS"] = "test-full-bwc";
+
+ const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle"]);
+ expect(pipelines).toMatchSnapshot();
+ });
+
+ test("should generate correct pipeline when using a trigger comment for it", () => {
+ process.env["GITHUB_PR_TRIGGER_COMMENT"] = "run elasticsearch-ci/using-defaults";
+
+ const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle"]);
+ expect(pipelines).toMatchSnapshot();
+ });
+});
diff --git a/.buildkite/scripts/pull-request/pipeline.ts b/.buildkite/scripts/pull-request/pipeline.ts
new file mode 100644
index 0000000000000..c4e12f2aa48fe
--- /dev/null
+++ b/.buildkite/scripts/pull-request/pipeline.ts
@@ -0,0 +1,162 @@
+import { parse } from "yaml";
+import { readFileSync, readdirSync } from "fs";
+import { basename, resolve } from "path";
+import { execSync } from "child_process";
+
+import { BuildkitePipeline, BuildkiteStep, EsPipeline, EsPipelineConfig } from "./types";
+import { getBwcVersions, getSnapshotBwcVersions } from "./bwc-versions";
+
+const PROJECT_ROOT = resolve(`${import.meta.dir}/../../..`);
+
+const getArray = (strOrArray: string | string[] | undefined): string[] => {
+ if (typeof strOrArray === "undefined") {
+ return [];
+ }
+
+ return typeof strOrArray === "string" ? [strOrArray] : strOrArray;
+};
+
+const labelCheckAllow = (pipeline: EsPipeline, labels: string[]): boolean => {
+ if (pipeline.config?.["allow-labels"]) {
+ return getArray(pipeline.config["allow-labels"]).some((label) => labels.includes(label));
+ }
+ return true;
+};
+
+const labelCheckSkip = (pipeline: EsPipeline, labels: string[]): boolean => {
+ if (pipeline.config?.["skip-labels"]) {
+ return !getArray(pipeline.config["skip-labels"]).some((label) => labels.includes(label));
+ }
+ return true;
+};
+
+// Exclude the pipeline if all of the changed files in the PR are in at least one excluded region
+const changedFilesExcludedCheck = (pipeline: EsPipeline, changedFiles: string[]): boolean => {
+ if (pipeline.config?.["excluded-regions"]) {
+ return !changedFiles.every((file) =>
+ getArray(pipeline.config?.["excluded-regions"]).some((region) => file.match(region))
+ );
+ }
+ return true;
+};
+
+// Include the pipeline if all of the changed files in the PR are in at least one included region
+const changedFilesIncludedCheck = (pipeline: EsPipeline, changedFiles: string[]): boolean => {
+ if (pipeline.config?.["included-regions"]) {
+ return changedFiles.every((file) =>
+ getArray(pipeline.config?.["included-regions"]).some((region) => file.match(region))
+ );
+ }
+ return true;
+};
+
+const triggerCommentCheck = (pipeline: EsPipeline): boolean => {
+ if (process.env["GITHUB_PR_TRIGGER_COMMENT"] && pipeline.config?.["trigger-phrase"]) {
+ return !!process.env["GITHUB_PR_TRIGGER_COMMENT"].match(pipeline.config["trigger-phrase"]);
+ }
+ return false;
+};
+
+// There are so many BWC versions that we can't use the matrix feature in Buildkite, as it's limited to 20 elements per dimension
+// So we need to duplicate the steps instead
+// Recursively check for any steps that have a bwc_template attribute and expand them out into multiple steps, one for each BWC_VERSION
+const doBwcTransforms = (step: BuildkitePipeline | BuildkiteStep) => {
+ const stepsToExpand = (step.steps || []).filter((s) => s.bwc_template);
+ step.steps = (step.steps || []).filter((s) => !s.bwc_template);
+
+ for (const s of step.steps) {
+ if (s.steps?.length) {
+ doBwcTransforms(s);
+ }
+ }
+
+ for (const stepToExpand of stepsToExpand) {
+ for (const bwcVersion of getBwcVersions()) {
+ let newStepJson = JSON.stringify(stepToExpand).replaceAll("$BWC_VERSION_SNAKE", bwcVersion.replaceAll(".", "_"));
+ newStepJson = newStepJson.replaceAll("$BWC_VERSION", bwcVersion);
+ const newStep = JSON.parse(newStepJson);
+ delete newStep.bwc_template;
+ step.steps.push(newStep);
+ }
+ }
+};
+
+export const generatePipelines = (
+ directory: string = `${PROJECT_ROOT}/.buildkite/pipelines/pull-request`,
+ changedFiles: string[] = []
+) => {
+ let defaults: EsPipelineConfig = { config: {} };
+ defaults = parse(readFileSync(`${directory}/.defaults.yml`, "utf-8"));
+ defaults.config = defaults.config || {};
+
+ let pipelines: EsPipeline[] = [];
+ const files = readdirSync(directory);
+ for (const file of files) {
+ if (!file.endsWith(".yml") || file.endsWith(".defaults.yml")) {
+ continue;
+ }
+
+ let yaml = readFileSync(`${directory}/${file}`, "utf-8");
+ yaml = yaml.replaceAll("$SNAPSHOT_BWC_VERSIONS", JSON.stringify(getSnapshotBwcVersions()));
+ const pipeline: EsPipeline = parse(yaml) || {};
+
+ pipeline.config = { ...defaults.config, ...(pipeline.config || {}) };
+
+ // '.../build-benchmark.yml' => 'build-benchmark'
+ const name = basename(file).split(".", 2)[0];
+ pipeline.name = name;
+ pipeline.config["trigger-phrase"] = pipeline.config["trigger-phrase"] || `.*run\\W+elasticsearch-ci/${name}.*`;
+
+ pipelines.push(pipeline);
+ }
+
+ const labels = (process.env["GITHUB_PR_LABELS"] || "")
+ .split(",")
+ .map((x) => x.trim())
+ .filter((x) => x);
+
+ if (!changedFiles?.length) {
+ const mergeBase = execSync(`git merge-base ${process.env["GITHUB_PR_TARGET_BRANCH"]} HEAD`, { cwd: PROJECT_ROOT })
+ .toString()
+ .trim();
+
+ const changedFilesOutput = execSync(`git diff --name-only ${mergeBase}`, { cwd: PROJECT_ROOT }).toString().trim();
+
+ changedFiles = changedFilesOutput
+ .split("\n")
+ .map((x) => x.trim())
+ .filter((x) => x);
+ }
+
+ let filters: ((pipeline: EsPipeline) => boolean)[] = [
+ (pipeline) => labelCheckAllow(pipeline, labels),
+ (pipeline) => labelCheckSkip(pipeline, labels),
+ (pipeline) => changedFilesExcludedCheck(pipeline, changedFiles),
+ (pipeline) => changedFilesIncludedCheck(pipeline, changedFiles),
+ ];
+
+ // When triggering via comment, we ONLY want to run pipelines that match the trigger phrase, regardless of labels, etc
+ if (process.env["GITHUB_PR_TRIGGER_COMMENT"]) {
+ filters = [triggerCommentCheck];
+ }
+
+ for (const filter of filters) {
+ pipelines = pipelines.filter(filter);
+ }
+
+ for (const pipeline of pipelines) {
+ doBwcTransforms(pipeline);
+ }
+
+ pipelines.sort((a, b) => (a.name ?? "").localeCompare(b.name ?? ""));
+
+ const finalPipelines = pipelines.map((pipeline) => {
+ const finalPipeline = { ...pipeline };
+ delete finalPipeline.config;
+ delete finalPipeline.name;
+
+ return finalPipeline;
+ });
+
+ return finalPipelines;
+};
diff --git a/.buildkite/scripts/pull-request/types.ts b/.buildkite/scripts/pull-request/types.ts
new file mode 100644
index 0000000000000..15140a03fb86a
--- /dev/null
+++ b/.buildkite/scripts/pull-request/types.ts
@@ -0,0 +1,24 @@
+export type EsPipelineConfig = {
+ config?: {
+ "allow-labels"?: string | string[];
+ "skip-labels"?: string | string[];
+ "included-regions"?: string | string[];
+ "excluded-regions"?: string | string[];
+ "trigger-phrase"?: string;
+ };
+};
+
+export type BuildkiteStep = {
+ steps?: BuildkiteStep[];
+ group?: string;
+ bwc_template?: boolean;
+};
+
+export type BuildkitePipeline = {
+ steps?: BuildkiteStep[];
+};
+
+export type EsPipeline = EsPipelineConfig &
+ BuildkitePipeline & {
+ name?: string;
+ };
diff --git a/.buildkite/scripts/lucene-snapshot/get-credentials.sh b/.buildkite/scripts/setup-legacy-vault.sh
similarity index 61%
rename from .buildkite/scripts/lucene-snapshot/get-credentials.sh
rename to .buildkite/scripts/setup-legacy-vault.sh
index 042c664384a7f..d84f2a94d5391 100755
--- a/.buildkite/scripts/lucene-snapshot/get-credentials.sh
+++ b/.buildkite/scripts/setup-legacy-vault.sh
@@ -2,9 +2,6 @@
set -euo pipefail
-# WARNING: this script will echo the credentials to the console. It is meant to be called from another script and captured in a variable.
-# It should really only be used inside .buildkite/hooks/pre-command
-
VAULT_ROLE_ID=$(vault read -field=role-id secret/ci/elastic-elasticsearch/legacy-vault-credentials)
VAULT_SECRET_ID=$(vault read -field=secret-id secret/ci/elastic-elasticsearch/legacy-vault-credentials)
VAULT_ADDR=https://secrets.elastic.co:8200
@@ -12,5 +9,3 @@ VAULT_ADDR=https://secrets.elastic.co:8200
unset VAULT_TOKEN
VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id=$VAULT_ROLE_ID secret_id=$VAULT_SECRET_ID)
export VAULT_TOKEN
-
-vault read -format=json aws-elastic/creds/lucene-snapshots
diff --git a/.buildkite/scripts/third-party-test-credentials.gcs.sh b/.buildkite/scripts/third-party-test-credentials.gcs.sh
new file mode 100755
index 0000000000000..fd1b435ed484b
--- /dev/null
+++ b/.buildkite/scripts/third-party-test-credentials.gcs.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# Usage: .buildkite/scripts/third-party-test-credentials.gcs.sh
+
+source .buildkite/scripts/setup-legacy-vault.sh
+
+vault read -field=private_key_data gcp-elastic-ci-prod/key/elasticsearch-ci-thirdparty-gcs | base64 --decode > "$1"
diff --git a/.buildkite/scripts/third-party-test-credentials.sh b/.buildkite/scripts/third-party-test-credentials.sh
new file mode 100755
index 0000000000000..c882d61cbade6
--- /dev/null
+++ b/.buildkite/scripts/third-party-test-credentials.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# You'll notice that most of the variables are exported twice with different names here
+# The first/uppercase export is to ensure that Buildkite masks the values in the logs should they accidentally be output
+# The second/lowercase export is what the tests expect/require
+
+if [[ "${USE_3RD_PARTY_AZURE_CREDENTIALS:-}" == "true" ]]; then
+ json=$(vault read -format=json secret/ci/elastic-elasticsearch/migrated/azure_thirdparty_test_creds)
+
+ AZURE_STORAGE_ACCOUNT_SECRET=$(echo "$json" | jq -r .data.account_id)
+ export AZURE_STORAGE_ACCOUNT_SECRET
+ export azure_storage_account="$AZURE_STORAGE_ACCOUNT_SECRET"
+
+ AZURE_STORAGE_KEY=$(echo "$json" | jq -r .data.account_key)
+ export AZURE_STORAGE_KEY
+ export azure_storage_key="$AZURE_STORAGE_KEY"
+fi
+
+if [[ "${USE_3RD_PARTY_AZURE_SAS_CREDENTIALS:-}" == "true" ]]; then
+ json=$(vault read -format=json secret/ci/elastic-elasticsearch/migrated/azure_thirdparty_sas_test_creds)
+
+ AZURE_STORAGE_ACCOUNT_SECRET=$(echo "$json" | jq -r .data.account_id)
+ export AZURE_STORAGE_ACCOUNT_SECRET
+ export azure_storage_account="$AZURE_STORAGE_ACCOUNT_SECRET"
+
+ AZURE_STORAGE_SAS_TOKEN=$(echo "$json" | jq -r .data.account_sas_token)
+ export AZURE_STORAGE_SAS_TOKEN
+ export azure_storage_sas_token="$AZURE_STORAGE_SAS_TOKEN"
+fi
+
+if [[ "${USE_3RD_PARTY_S3_CREDENTIALS:-}" == "true" ]]; then
+ json=$(.buildkite/scripts/get-legacy-secret.sh aws-test/creds/elasticsearch-ci-s3)
+ AMAZON_S3_ACCESS_KEY=$(echo "$json" | jq -r .data.access_key)
+ export AMAZON_S3_ACCESS_KEY
+ export amazon_s3_access_key="$AMAZON_S3_ACCESS_KEY"
+
+ AMAZON_S3_SECRET_KEY=$(echo "$json" | jq -r .data.secret_key)
+ export AMAZON_S3_SECRET_KEY
+ export amazon_s3_secret_key="$AMAZON_S3_SECRET_KEY"
+fi
+
+if [[ "${USE_3RD_PARTY_GCS_CREDENTIALS:-}" == "true" ]]; then
+ export google_storage_service_account=$(mktemp)
+ .buildkite/scripts/third-party-test-credentials.gcs.sh "$google_storage_service_account"
+fi
+
+
+
+unset json
diff --git a/.buildkite/tsconfig.json b/.buildkite/tsconfig.json
new file mode 100644
index 0000000000000..1449bc3d931a8
--- /dev/null
+++ b/.buildkite/tsconfig.json
@@ -0,0 +1,22 @@
+{
+ "compilerOptions": {
+ "lib": ["ESNext"],
+ "module": "esnext",
+ "target": "esnext",
+ "moduleResolution": "bundler",
+ "moduleDetection": "force",
+ "allowImportingTsExtensions": true,
+ "noEmit": true,
+ "composite": true,
+ "strict": true,
+ "downlevelIteration": true,
+ "skipLibCheck": true,
+ "jsx": "preserve",
+ "allowSyntheticDefaultImports": true,
+ "forceConsistentCasingInFileNames": true,
+ "allowJs": true,
+ "types": [
+ "bun-types" // add Bun global
+ ]
+ }
+}
diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml
index 48a537c33b612..7d0d724d6bbc4 100644
--- a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-unix.yml
@@ -31,9 +31,9 @@
- almalinux-8-packaging
builders:
- inject:
- properties-file: '.ci/java-versions.properties'
+ properties-file: ".ci/java-versions.properties"
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
- ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructivePackagingTest
+ ./.ci/scripts/packaging-test.sh destructivePackagingTest
diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml
index 291ae9da4cd75..134deae255cd5 100644
--- a/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+multijob+packaging-tests-upgrade.yml
@@ -22,10 +22,10 @@
name: BWC_VERSION
builders:
- inject:
- properties-file: '.ci/java-versions.properties'
+ properties-file: ".ci/java-versions.properties"
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
JAVA16_HOME=$HOME/.java/openjdk16
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
- ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION
+ ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v$BWC_VERSION
diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml
index 04e48036a8e9e..2d4f372142512 100644
--- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix-sample.yml
@@ -25,9 +25,9 @@
- ^docs/.*
- ^x-pack/docs/.*
black-list-labels:
- - '>test-mute'
- - ':Delivery/Packaging'
- - 'buildkite-opt-in'
+ - ">test-mute"
+ - ":Delivery/Packaging"
+ - "buildkite-opt-in"
axes:
- axis:
type: label-expression
@@ -39,14 +39,14 @@
type: user-defined
name: PACKAGING_TASK
values:
- - 'destructiveDistroTest.docker'
- - 'destructiveDistroTest.packages'
- - 'destructiveDistroTest.archives'
+ - "destructiveDistroTest.docker"
+ - "destructiveDistroTest.packages"
+ - "destructiveDistroTest.archives"
builders:
- inject:
- properties-file: '.ci/java-versions.properties'
+ properties-file: ".ci/java-versions.properties"
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
- ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ $PACKAGING_TASK
+ ./.ci/scripts/packaging-test.sh $PACKAGING_TASK
diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml
index a7413699ff6c3..af1d3f493eeb0 100644
--- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-tests-unix.yml
@@ -25,10 +25,10 @@
- ^docs/.*
- ^x-pack/docs/.*
white-list-labels:
- - ':Delivery/Packaging'
+ - ":Delivery/Packaging"
black-list-labels:
- - '>test-mute'
- - 'buildkite-opt-in'
+ - ">test-mute"
+ - "buildkite-opt-in"
axes:
- axis:
type: label-expression
@@ -54,14 +54,14 @@
type: user-defined
name: PACKAGING_TASK
values:
- - 'destructiveDistroTest.docker'
- - 'destructiveDistroTest.packages'
- - 'destructiveDistroTest.archives'
+ - "destructiveDistroTest.docker"
+ - "destructiveDistroTest.packages"
+ - "destructiveDistroTest.archives"
builders:
- inject:
- properties-file: '.ci/java-versions.properties'
+ properties-file: ".ci/java-versions.properties"
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
- ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ $PACKAGING_TASK
+ ./.ci/scripts/packaging-test.sh $PACKAGING_TASK
diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml
index 2b73d0144cab7..19ed5398e3e1d 100644
--- a/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+packaging-upgrade-tests.yml
@@ -26,10 +26,10 @@
- ^docs/.*
- ^x-pack/docs/.*
white-list-labels:
- - ':Delivery/Packaging'
+ - ":Delivery/Packaging"
black-list-labels:
- - '>test-mute'
- - 'buildkite-opt-in'
+ - ">test-mute"
+ - "buildkite-opt-in"
axes:
- axis:
type: label-expression
@@ -43,7 +43,7 @@
name: "BWC_VERSION"
builders:
- inject:
- properties-file: '.ci/java-versions.properties'
+ properties-file: ".ci/java-versions.properties"
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
JAVA8_HOME=$HOME/.java/java8
@@ -51,4 +51,4 @@
JAVA16_HOME=$HOME/.java/openjdk16
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
- ./.ci/scripts/packaging-test.sh --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ destructiveDistroUpgradeTest.v$BWC_VERSION
+ ./.ci/scripts/packaging-test.sh destructiveDistroUpgradeTest.v$BWC_VERSION
diff --git a/.ci/scripts/packaging-test.sh b/.ci/scripts/packaging-test.sh
index afe162b23e564..1626255c30b4f 100755
--- a/.ci/scripts/packaging-test.sh
+++ b/.ci/scripts/packaging-test.sh
@@ -77,5 +77,5 @@ sudo -E env \
--unset=ES_JAVA_HOME \
--unset=JAVA_HOME \
SYSTEM_JAVA_HOME=`readlink -f -n $BUILD_JAVA_HOME` \
- ./gradlew -g $HOME/.gradle --scan --parallel --continue $@
+ ./gradlew -g $HOME/.gradle --scan --parallel --build-cache -Dorg.elasticsearch.build.cache.url=https://gradle-enterprise.elastic.co/cache/ --continue $@
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
index 7f9e258abd65a..5711d66fd848f 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
@@ -106,7 +106,7 @@ buildScan {
// Add SCM information
def prId = System.getenv('BUILDKITE_PULL_REQUEST')
if (prId != 'false') {
- def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git")
+ def prBaseUrl = (System.getenv('BUILDKITE_PULL_REQUEST_REPO') - ".git").replaceFirst("git://", "https://")
value 'Git Commit ID', System.getenv('BUILDKITE_COMMIT')
tag "pr/${prId}"
tag 'pull-request'
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index cecc5c7806240..1bd11dc2313ba 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -184,6 +184,7 @@ def commonPackageConfig(String type, String architecture) {
configurationFile '/etc/elasticsearch/users_roles'
from("${packagingFiles}") {
dirMode 02750
+ setgid = true
into('/etc')
permissionGroup 'elasticsearch'
includeEmptyDirs true
@@ -194,6 +195,7 @@ def commonPackageConfig(String type, String architecture) {
from("${packagingFiles}/etc/elasticsearch") {
into('/etc/elasticsearch')
dirMode 02750
+ setgid = true
fileMode 0660
permissionGroup 'elasticsearch'
includeEmptyDirs true
@@ -240,7 +242,8 @@ def commonPackageConfig(String type, String architecture) {
createDirectoryEntry true
user u
permissionGroup g
- dirMode mode
+ dirMode = mode
+ setgid = mode == 02750
}
}
copyEmptyDir('/var/log/elasticsearch', 'elasticsearch', 'elasticsearch', 02750)
diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
index 6d832dfff2758..e6dabb471bed7 100644
--- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
+++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java
@@ -53,7 +53,8 @@ class APMJvmOptions {
"log_file", "_AGENT_HOME_/../../logs/apm.log",
// ES does not use auto-instrumentation.
- "instrument", "false"
+ "instrument", "false",
+ "enable_experimental_instrumentations", "true"
);
/**
diff --git a/docs/changelog/100064.yaml b/docs/changelog/100064.yaml
new file mode 100644
index 0000000000000..f595b7e8e0705
--- /dev/null
+++ b/docs/changelog/100064.yaml
@@ -0,0 +1,5 @@
+pr: 100064
+summary: Update the elastic-apm-agent version
+area: Infra/Core
+type: enhancement
+issues: []
diff --git a/docs/changelog/99604.yaml b/docs/changelog/99604.yaml
new file mode 100644
index 0000000000000..7b473a056d608
--- /dev/null
+++ b/docs/changelog/99604.yaml
@@ -0,0 +1,5 @@
+pr: 99604
+summary: Show concrete error when enrich index not exist rather than NPE
+area: Ingest Node
+type: enhancement
+issues: []
diff --git a/docs/reference/modules/network/threading.asciidoc b/docs/reference/modules/network/threading.asciidoc
index 87e7e2371472b..abf00b521b5cc 100644
--- a/docs/reference/modules/network/threading.asciidoc
+++ b/docs/reference/modules/network/threading.asciidoc
@@ -107,3 +107,12 @@ However, this API itself sends network messages so may not work correctly if
the `transport_worker` threads are too busy. It is more reliable to use
`jstack` to obtain stack dumps or use Java Flight Recorder to obtain a
profiling trace. These tools are independent of any work the JVM is performing.
+
+It may also be possible to identify some reasons for delays from the server
+logs, particularly looking at warnings from
+`org.elasticsearch.transport.InboundHandler` and
+`org.elasticsearch.transport.OutboundHandler`. Warnings about long processing
+times from the `InboundHandler` are particularly indicative of incorrect
+threading behaviour, whereas the transmission time reported by the
+`OutboundHandler` includes time spent waiting for network congestion and the
+`transport_worker` thread is free to do other work during this time.
diff --git a/docs/reference/troubleshooting/network-timeouts.asciidoc b/docs/reference/troubleshooting/network-timeouts.asciidoc
index c15c5ee0d58a5..ab60eeff1b1a9 100644
--- a/docs/reference/troubleshooting/network-timeouts.asciidoc
+++ b/docs/reference/troubleshooting/network-timeouts.asciidoc
@@ -47,5 +47,7 @@ since it doesn't require any JVM threads.
The threads involved in discovery and cluster membership are mainly
`transport_worker` and `cluster_coordination` threads, for which there should
never be a long wait. There may also be evidence of long waits for threads in
-the {es} logs. See <<modules-network-threading>> for more information.
+the {es} logs, particularly looking at warning logs from
+`org.elasticsearch.transport.InboundHandler`. See
+<<modules-network-threading>> for more information.
end::troubleshooting-network-timeouts-threads[]
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index 4ebb0c46159a3..7209c4478d159 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -69,11 +69,11 @@
-
-
-
-
-
+
+
+
+
+
diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle
index c8619c97d1068..c9002a71bf746 100644
--- a/modules/apm/build.gradle
+++ b/modules/apm/build.gradle
@@ -18,7 +18,7 @@ dependencies {
implementation "io.opentelemetry:opentelemetry-api:${otelVersion}"
implementation "io.opentelemetry:opentelemetry-context:${otelVersion}"
implementation "io.opentelemetry:opentelemetry-semconv:${otelVersion}-alpha"
- runtimeOnly "co.elastic.apm:elastic-apm-agent:1.36.0"
+ runtimeOnly "co.elastic.apm:elastic-apm-agent:1.43.0"
}
tasks.named("dependencyLicenses").configure {
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index b460c7aaf2ebc..77fc98971ef0f 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -149,7 +149,7 @@ static TransportVersion def(int id) {
public static final TransportVersion DATA_STREAM_RESPONSE_INDEX_PROPERTIES = def(8_506_00_0);
public static final TransportVersion ML_TRAINED_MODEL_CONFIG_PLATFORM_ADDED = def(8_507_00_0);
public static final TransportVersion LONG_COUNT_IN_HISTOGRAM_ADDED = def(8_508_00_0);
-
+ public static final TransportVersion INFERENCE_MODEL_SECRETS_ADDED = def(8_509_00_0);
/*
* STOP! READ THIS FIRST! No, really,
* ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _
diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java
index 7395d6003ec44..846a0800cc41f 100644
--- a/server/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -677,7 +677,7 @@ public void reg
actions.register(ListTasksAction.INSTANCE, TransportListTasksAction.class);
actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class);
actions.register(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class);
- actions.register(GetHealthAction.INSTANCE, GetHealthAction.TransportAction.class);
+ actions.register(GetHealthAction.INSTANCE, GetHealthAction.LocalAction.class);
actions.register(PrevalidateNodeRemovalAction.INSTANCE, TransportPrevalidateNodeRemovalAction.class);
actions.register(HealthApiStatsAction.INSTANCE, HealthApiStatsTransportAction.class);
diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 4d3b9b0c15ff0..cb5a9ce3db353 100644
--- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -14,6 +14,7 @@
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
@@ -124,4 +125,14 @@ public void onFailure(Exception e) {
}
}
}
+
+ /**
+ * A method to use as a placeholder in implementations of {@link TransportAction} which only ever run on the local node, and therefore
+ * do not need to serialize or deserialize any messages. See also {@link Writeable.Reader#localOnly()}.
+ */
+ // TODO remove this when https://github.com/elastic/elasticsearch/issues/100111 is resolved
+ public static <T> T localOnly() {
+ assert false : "local-only action";
+ throw new UnsupportedOperationException("local-only action");
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
index b51364ebc2c84..f45f16db502e9 100644
--- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
+++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java
@@ -26,6 +26,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@@ -125,16 +126,32 @@ public GroupShardsIterator searchShards(
nodeCounts
);
if (iterator != null) {
- var shardsThatCanHandleSearches = iterator.getShardRoutings()
- .stream()
- .filter(shardRouting -> canSearchShard(shardRouting, clusterState))
- .toList();
+ final List<ShardRouting> shardsThatCanHandleSearches;
+ if (isStateless) {
+ shardsThatCanHandleSearches = statelessShardsThatHandleSearches(clusterState, iterator);
+ } else {
+ shardsThatCanHandleSearches = statefulShardsThatHandleSearches(iterator);
+ }
set.add(new PlainShardIterator(iterator.shardId(), shardsThatCanHandleSearches));
}
}
return GroupShardsIterator.sortAndCreate(new ArrayList<>(set));
}
+ private static List<ShardRouting> statefulShardsThatHandleSearches(ShardIterator iterator) {
+ final List<ShardRouting> shardsThatCanHandleSearches = new ArrayList<>(iterator.size());
+ for (ShardRouting shardRouting : iterator) {
+ if (shardRouting.isSearchable()) {
+ shardsThatCanHandleSearches.add(shardRouting);
+ }
+ }
+ return shardsThatCanHandleSearches;
+ }
+
+ private static List<ShardRouting> statelessShardsThatHandleSearches(ClusterState clusterState, ShardIterator iterator) {
+ return iterator.getShardRoutings().stream().filter(shardRouting -> canSearchShard(shardRouting, clusterState)).toList();
+ }
+
public static ShardIterator getShards(ClusterState clusterState, ShardId shardId) {
final IndexShardRoutingTable shard = clusterState.routingTable().shardRoutingTable(shardId);
return shard.activeInitializingShardsRandomIt();
@@ -177,7 +194,7 @@ private ShardIterator preferenceActiveShardIterator(
@Nullable Map nodeCounts
) {
if (preference == null || preference.isEmpty()) {
- return shardRoutings(indexShard, nodes, collectorService, nodeCounts);
+ return shardRoutings(indexShard, collectorService, nodeCounts);
}
if (preference.charAt(0) == '_') {
Preference preferenceType = Preference.parse(preference);
@@ -204,7 +221,7 @@ private ShardIterator preferenceActiveShardIterator(
}
// no more preference
if (index == -1 || index == preference.length() - 1) {
- return shardRoutings(indexShard, nodes, collectorService, nodeCounts);
+ return shardRoutings(indexShard, collectorService, nodeCounts);
} else {
// update the preference and continue
preference = preference.substring(index + 1);
@@ -237,7 +254,6 @@ private ShardIterator preferenceActiveShardIterator(
private ShardIterator shardRoutings(
IndexShardRoutingTable indexShard,
- DiscoveryNodes nodes,
@Nullable ResponseCollectorService collectorService,
@Nullable Map nodeCounts
) {
diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java b/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java
index b263edb112079..d3422c1b51a22 100644
--- a/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java
+++ b/server/src/main/java/org/elasticsearch/common/io/stream/Writeable.java
@@ -8,6 +8,8 @@
package org.elasticsearch.common.io.stream;
+import org.elasticsearch.action.support.TransportAction;
+
import java.io.IOException;
/**
@@ -74,6 +76,13 @@ interface Reader {
*/
V read(StreamInput in) throws IOException;
+ /**
+ * A {@link Reader} which must never be called, for use in local-only transport actions. See also {@link TransportAction#localOnly}.
+ */
+ // TODO remove this when https://github.com/elastic/elasticsearch/issues/100111 is resolved
+ static <V> Reader<V> localOnly() {
+ return in -> TransportAction.localOnly();
+ }
}
}
diff --git a/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java b/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java
index 262187c7879d9..de03455823103 100644
--- a/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java
+++ b/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java
@@ -17,21 +17,35 @@
*/
public final class CachedSupplier<T> implements Supplier<T> {
- private Supplier<T> supplier;
- private T result;
- private boolean resultSet;
+ private volatile Supplier<T> supplier;
+ private volatile T result;
- public CachedSupplier(Supplier<T> supplier) {
+ public static <R> CachedSupplier<R> wrap(Supplier<R> supplier) {
+ if (supplier instanceof CachedSupplier<R> c) {
+ // no need to wrap a cached supplier again
+ return c;
+ }
+ return new CachedSupplier<>(supplier);
+ }
+
+ private CachedSupplier(Supplier<T> supplier) {
this.supplier = supplier;
}
@Override
- public synchronized T get() {
- if (resultSet == false) {
- result = supplier.get();
- resultSet = true;
+ public T get() {
+ if (supplier == null) {
+ return result;
}
+ initResult();
return result;
}
+ private synchronized void initResult() {
+ if (supplier != null) {
+ result = supplier.get();
+ supplier = null;
+ }
+ }
+
}
diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java
index 5e60b2b6c87ba..b571c3f1f005a 100644
--- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java
+++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java
@@ -14,13 +14,14 @@
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.client.internal.node.NodeClient;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.health.stats.HealthApiStats;
@@ -45,7 +46,7 @@ public class GetHealthAction extends ActionType {
public static final String NAME = "cluster:monitor/health_api";
private GetHealthAction() {
- super(NAME, GetHealthAction.Response::new);
+ super(NAME, Writeable.Reader.localOnly());
}
public static class Response extends ActionResponse implements ChunkedToXContent {
@@ -55,10 +56,6 @@ public static class Response extends ActionResponse implements ChunkedToXContent
private final HealthStatus status;
private final List indicators;
- public Response(StreamInput in) {
- throw new AssertionError("GetHealthAction should not be sent over the wire.");
- }
-
public Response(final ClusterName clusterName, final List indicators, boolean showTopLevelStatus) {
this.indicators = indicators;
this.clusterName = clusterName;
@@ -90,7 +87,7 @@ public List getIndicatorResults() {
@Override
public void writeTo(StreamOutput out) throws IOException {
- throw new AssertionError("GetHealthAction should not be sent over the wire.");
+ TransportAction.localOnly();
}
@Override
@@ -173,9 +170,14 @@ public ActionRequestValidationException validate() {
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) {
return new CancellableTask(id, type, action, "", parentTaskId, headers);
}
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ TransportAction.localOnly();
+ }
}
- public static class TransportAction extends org.elasticsearch.action.support.TransportAction<Request, Response> {
+ public static class LocalAction extends TransportAction<Request, Response> {
private final ClusterService clusterService;
private final HealthService healthService;
@@ -183,7 +185,7 @@ public static class TransportAction extends org.elasticsearch.action.support.Tra
private final HealthApiStats healthApiStats;
@Inject
- public TransportAction(
+ public LocalAction(
ActionFilters actionFilters,
TransportService transportService,
ClusterService clusterService,
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 37fce6411af4f..e9651a7f63867 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -878,6 +878,18 @@ protected GetResult realtimeGetUnderLock(
}
boolean getFromSearcherIfNotInTranslog = getFromSearcher;
if (versionValue != null) {
+ /*
+ * Once we've seen the ID in the live version map, in two cases it is still possible not to
+ * be able to follow up with serving the get from the translog:
+ * 1. It is possible that once we attempt to handle the get, we won't see the doc in the translog
+ * since it might have been moved out.
+ * TODO: ideally we should keep around translog entries long enough to cover this case
+ * 2. We might not be tracking translog locations in the live version map (see @link{trackTranslogLocation})
+ *
+ * In these cases, we should always fall back to get the doc from the internal searcher.
+ */
+
+ getFromSearcherIfNotInTranslog = true;
if (versionValue.isDelete()) {
return GetResult.NOT_EXISTS;
}
@@ -911,11 +923,8 @@ protected GetResult realtimeGetUnderLock(
throw new EngineException(shardId, "failed to read operation from translog", e);
}
} else {
+ // We need to start tracking translog locations in the live version map.
trackTranslogLocation.set(true);
- // We need to start tracking translog locations in the live version map. Refresh and
- // serve all the real-time gets with a missing translog location from the internal searcher
- // (until a flush happens) even if we're supposed to only get from translog.
- getFromSearcherIfNotInTranslog = true;
}
}
assert versionValue.seqNo >= 0 : versionValue;
diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java
index 3bc9cf2a5a116..db38ca1e037a1 100644
--- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java
+++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java
@@ -17,8 +17,9 @@ public interface InferenceService {
String name();
/**
- * Parse model configuration from the {@code config map} and return
- * the parsed {@link Model}.
+ * Parse model configuration from the {@code config map} of a request and return
+ * the parsed {@link Model}. This requires that both the secrets and service settings be contained in the
+ * {@code service_settings} field.
* This function modifies {@code config map}, fields are removed
* from the map as they are read.
*
@@ -27,21 +28,25 @@ public interface InferenceService {
*
* @param modelId Model Id
* @param taskType The model task type
- * @param config Configuration options
+ * @param config Configuration options including the secrets
* @return The parsed {@link Model}
*/
- Model parseConfigStrict(String modelId, TaskType taskType, Map<String, Object> config);
+ Model parseRequestConfig(String modelId, TaskType taskType, Map<String, Object> config);
/**
- * As {@link #parseConfigStrict(String, TaskType, Map)} but the function
- * does not throw on unrecognized options.
+ * Parse model configuration from {@code config map} from persisted storage and return the parsed {@link Model}. This requires that
+ * secrets and service settings be in two separate maps.
+ * This function modifies {@code config map}, fields are removed from the map as they are read.
+ *
+ * If the map contains unrecognized configuration options, no error is thrown.
*
* @param modelId Model Id
* @param taskType The model task type
* @param config Configuration options
+ * @param secrets Sensitive configuration options (e.g. api key)
* @return The parsed {@link Model}
*/
- Model parseConfigLenient(String modelId, TaskType taskType, Map<String, Object> config);
+ Model parsePersistedConfig(String modelId, TaskType taskType, Map<String, Object> config, Map<String, Object> secrets);
/**
* Perform inference on the model.
diff --git a/server/src/main/java/org/elasticsearch/inference/Model.java b/server/src/main/java/org/elasticsearch/inference/Model.java
index 67ee58bad733c..eedb67a8111e5 100644
--- a/server/src/main/java/org/elasticsearch/inference/Model.java
+++ b/server/src/main/java/org/elasticsearch/inference/Model.java
@@ -8,101 +8,72 @@
package org.elasticsearch.inference;
-import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
import java.util.Objects;
-public class Model implements ToXContentObject, VersionedNamedWriteable {
-
- public static final String MODEL_ID = "model_id";
- public static final String SERVICE = "service";
- public static final String SERVICE_SETTINGS = "service_settings";
- public static final String TASK_SETTINGS = "task_settings";
-
- private static final String NAME = "inference_model";
-
+public class Model {
public static String documentId(String modelId) {
return "model_" + modelId;
}
- private final String modelId;
- private final TaskType taskType;
- private final String service;
- private final ServiceSettings serviceSettings;
- private final TaskSettings taskSettings;
-
- public Model(String modelId, TaskType taskType, String service, ServiceSettings serviceSettings, TaskSettings taskSettings) {
- this.modelId = modelId;
- this.taskType = taskType;
- this.service = service;
- this.serviceSettings = serviceSettings;
- this.taskSettings = taskSettings;
- }
-
- public Model(StreamInput in) throws IOException {
- this.modelId = in.readString();
- this.taskType = in.readEnum(TaskType.class);
- this.service = in.readString();
- this.serviceSettings = in.readNamedWriteable(ServiceSettings.class);
- this.taskSettings = in.readNamedWriteable(TaskSettings.class);
- }
+ private final ModelConfigurations configurations;
+ private final ModelSecrets secrets;
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeString(modelId);
- out.writeEnum(taskType);
- out.writeString(service);
- out.writeNamedWriteable(serviceSettings);
- out.writeNamedWriteable(taskSettings);
+ public Model(ModelConfigurations configurations, ModelSecrets secrets) {
+ this.configurations = Objects.requireNonNull(configurations);
+ this.secrets = Objects.requireNonNull(secrets);
}
- public String getModelId() {
- return modelId;
+ public Model(ModelConfigurations configurations) {
+ this(configurations, new ModelSecrets());
}
- public TaskType getTaskType() {
- return taskType;
+ /**
+ * Returns the model's non-sensitive configurations (e.g. service name).
+ */
+ public ModelConfigurations getConfigurations() {
+ return configurations;
}
- public String getService() {
- return service;
+ /**
+ * Returns the model's sensitive configurations (e.g. api key).
+ *
+ * This returns an object that in json would look like:
+ *
+ *
+ * {@code
+ * {
+ * "secret_settings": { "api_key": "abc" }
+ * }
+ * }
+ *
+ */
+ public ModelSecrets getSecrets() {
+ return secrets;
}
public ServiceSettings getServiceSettings() {
- return serviceSettings;
+ return configurations.getServiceSettings();
}
public TaskSettings getTaskSettings() {
- return taskSettings;
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.startObject();
- builder.field(MODEL_ID, modelId);
- builder.field(TaskType.NAME, taskType.toString());
- builder.field(SERVICE, service);
- builder.field(SERVICE_SETTINGS, serviceSettings);
- builder.field(TASK_SETTINGS, taskSettings);
- builder.endObject();
- return builder;
+ return configurations.getTaskSettings();
}
- @Override
- public String getWriteableName() {
- return NAME;
- }
-
- @Override
- public TransportVersion getMinimalSupportedVersion() {
- return TransportVersions.V_8_500_074;
+ /**
+ * Returns the inner sensitive data defined by a particular service.
+ *
+ * This returns an object that in json would look like:
+ *
+ *
+ * {@code
+ * {
+ * "api_key": "abc"
+ * }
+ * }
+ *
+ */
+ public SecretSettings getSecretSettings() {
+ return secrets.getSecretSettings();
}
@Override
@@ -110,15 +81,11 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Model model = (Model) o;
- return Objects.equals(modelId, model.modelId)
- && taskType == model.taskType
- && Objects.equals(service, model.service)
- && Objects.equals(serviceSettings, model.serviceSettings)
- && Objects.equals(taskSettings, model.taskSettings);
+ return Objects.equals(configurations, model.configurations) && Objects.equals(secrets, model.secrets);
}
@Override
public int hashCode() {
- return Objects.hash(modelId, taskType, service, serviceSettings, taskSettings);
+ return Objects.hash(configurations, secrets);
}
}
diff --git a/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java
new file mode 100644
index 0000000000000..a8ae380bd3ba1
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/inference/ModelConfigurations.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.inference;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class ModelConfigurations implements ToXContentObject, VersionedNamedWriteable {
+
+ public static final String MODEL_ID = "model_id";
+ public static final String SERVICE = "service";
+ public static final String SERVICE_SETTINGS = "service_settings";
+ public static final String TASK_SETTINGS = "task_settings";
+ private static final String NAME = "inference_model";
+
+ private final String modelId;
+ private final TaskType taskType;
+ private final String service;
+ private final ServiceSettings serviceSettings;
+ private final TaskSettings taskSettings;
+
+ public ModelConfigurations(
+ String modelId,
+ TaskType taskType,
+ String service,
+ ServiceSettings serviceSettings,
+ TaskSettings taskSettings
+ ) {
+ this.modelId = modelId;
+ this.taskType = taskType;
+ this.service = service;
+ this.serviceSettings = serviceSettings;
+ this.taskSettings = taskSettings;
+ }
+
+ public ModelConfigurations(StreamInput in) throws IOException {
+ this.modelId = in.readString();
+ this.taskType = in.readEnum(TaskType.class);
+ this.service = in.readString();
+ this.serviceSettings = in.readNamedWriteable(ServiceSettings.class);
+ this.taskSettings = in.readNamedWriteable(TaskSettings.class);
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(modelId);
+ out.writeEnum(taskType);
+ out.writeString(service);
+ out.writeNamedWriteable(serviceSettings);
+ out.writeNamedWriteable(taskSettings);
+ }
+
+ public String getModelId() {
+ return modelId;
+ }
+
+ public TaskType getTaskType() {
+ return taskType;
+ }
+
+ public String getService() {
+ return service;
+ }
+
+ public ServiceSettings getServiceSettings() {
+ return serviceSettings;
+ }
+
+ public TaskSettings getTaskSettings() {
+ return taskSettings;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(MODEL_ID, modelId);
+ builder.field(TaskType.NAME, taskType.toString());
+ builder.field(SERVICE, service);
+ builder.field(SERVICE_SETTINGS, serviceSettings);
+ builder.field(TASK_SETTINGS, taskSettings);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.V_8_500_074;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ModelConfigurations model = (ModelConfigurations) o;
+ return Objects.equals(modelId, model.modelId)
+ && taskType == model.taskType
+ && Objects.equals(service, model.service)
+ && Objects.equals(serviceSettings, model.serviceSettings)
+ && Objects.equals(taskSettings, model.taskSettings);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(modelId, taskType, service, serviceSettings, taskSettings);
+ }
+}
diff --git a/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java b/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java
new file mode 100644
index 0000000000000..78199ae3029ba
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/inference/ModelSecrets.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.inference;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Represents the portion of a model that contains sensitive data
+ */
+public class ModelSecrets implements ToXContentObject, VersionedNamedWriteable {
+ public static final String SECRET_SETTINGS = "secret_settings";
+ private static final String NAME = "inference_model_secrets";
+ private final SecretSettings secretSettings;
+
+ public ModelSecrets() {
+ this.secretSettings = null;
+ }
+
+ public ModelSecrets(@Nullable SecretSettings secretSettings) {
+ // allow the secrets to be null in cases where the service does not have any secrets
+ this.secretSettings = secretSettings;
+ }
+
+ public ModelSecrets(StreamInput in) throws IOException {
+ this(in.readOptionalNamedWriteable(SecretSettings.class));
+ }
+
+ public SecretSettings getSecretSettings() {
+ return secretSettings;
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeOptionalNamedWriteable(secretSettings);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+
+ if (secretSettings != null) {
+ builder.field(SECRET_SETTINGS, secretSettings);
+ }
+
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.INFERENCE_MODEL_SECRETS_ADDED;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ModelSecrets that = (ModelSecrets) o;
+ return Objects.equals(secretSettings, that.secretSettings);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(secretSettings);
+ }
+}
diff --git a/server/src/main/java/org/elasticsearch/inference/SecretSettings.java b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java
new file mode 100644
index 0000000000000..581f5dd442ea4
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/inference/SecretSettings.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.inference;
+
+import org.elasticsearch.common.io.stream.VersionedNamedWriteable;
+import org.elasticsearch.xcontent.ToXContentObject;
+
+public interface SecretSettings extends ToXContentObject, VersionedNamedWriteable {
+
+}
diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java
index 53ad4188b6ada..191ce130805a8 100644
--- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java
@@ -238,7 +238,7 @@ public void testClosesChannelOnErrorInHandshake() throws Exception {
mockAppender.addExpectation(
new MockLogAppender.SeenEventExpectation(
"expected message",
- InboundHandler.class.getCanonicalName(),
+ EXPECTED_LOGGER_NAME,
Level.WARN,
"error processing handshake version"
)
@@ -275,6 +275,12 @@ public void testClosesChannelOnErrorInHandshake() throws Exception {
}
}
+ /**
+ * This logger is mentioned in the docs by name, so we cannot rename it without adjusting the docs. Thus we fix the expected logger
+ * name in this string constant rather than using {@code InboundHandler.class.getCanonicalName()}.
+ */
+ private static final String EXPECTED_LOGGER_NAME = "org.elasticsearch.transport.InboundHandler";
+
public void testLogsSlowInboundProcessing() throws Exception {
final MockLogAppender mockAppender = new MockLogAppender();
mockAppender.start();
@@ -286,12 +292,7 @@ public void testLogsSlowInboundProcessing() throws Exception {
final TransportVersion remoteVersion = TransportVersion.current();
mockAppender.addExpectation(
- new MockLogAppender.SeenEventExpectation(
- "expected slow request",
- InboundHandler.class.getCanonicalName(),
- Level.WARN,
- "handling request "
- )
+ new MockLogAppender.SeenEventExpectation("expected slow request", EXPECTED_LOGGER_NAME, Level.WARN, "handling request ")
);
final long requestId = randomNonNegativeLong();
@@ -318,12 +319,7 @@ public void testLogsSlowInboundProcessing() throws Exception {
mockAppender.assertAllExpectationsMatched();
mockAppender.addExpectation(
- new MockLogAppender.SeenEventExpectation(
- "expected slow response",
- InboundHandler.class.getCanonicalName(),
- Level.WARN,
- "handling response "
- )
+ new MockLogAppender.SeenEventExpectation("expected slow response", EXPECTED_LOGGER_NAME, Level.WARN, "handling response ")
);
final long responseId = randomNonNegativeLong();
diff --git a/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java
index c47ea906f38df..9896dbf4a861b 100644
--- a/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/OutboundHandlerTests.java
@@ -317,16 +317,17 @@ public void onResponseSent(long requestId, String action, Exception error) {
assertEquals("header_value", header.getHeaders().v1().get("header"));
}
+ /**
+ * This logger is mentioned in the docs by name, so we cannot rename it without adjusting the docs. Thus we fix the expected logger
+ * name in this string constant rather than using {@code OutboundHandler.class.getCanonicalName()}.
+ */
+ private static final String EXPECTED_LOGGER_NAME = "org.elasticsearch.transport.OutboundHandler";
+
public void testSlowLogOutboundMessage() throws Exception {
final MockLogAppender mockAppender = new MockLogAppender();
mockAppender.start();
mockAppender.addExpectation(
- new MockLogAppender.SeenEventExpectation(
- "expected message",
- OutboundHandler.class.getCanonicalName(),
- Level.WARN,
- "sending transport message "
- )
+ new MockLogAppender.SeenEventExpectation("expected message", EXPECTED_LOGGER_NAME, Level.WARN, "sending transport message ")
);
final Logger outboundHandlerLogger = LogManager.getLogger(OutboundHandler.class);
Loggers.addAppender(outboundHandlerLogger, mockAppender);
diff --git a/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java b/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java
index 6593bce0944fd..5cbfd750498f6 100644
--- a/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java
+++ b/test/fixtures/geoip-fixture/src/main/java/fixture/geoip/GeoIpHttpFixture.java
@@ -25,6 +25,7 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
public class GeoIpHttpFixture extends ExternalResource {
@@ -106,15 +107,25 @@ protected void after() {
}
private void copyFiles() throws Exception {
- Files.copy(GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-ASN.tgz"), source.resolve("GeoLite2-ASN.tgz"));
- Files.copy(GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-City.mmdb"), source.resolve("GeoLite2-City.mmdb"));
+ Files.copy(
+ GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-ASN.tgz"),
+ source.resolve("GeoLite2-ASN.tgz"),
+ StandardCopyOption.REPLACE_EXISTING
+ );
+ Files.copy(
+ GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-City.mmdb"),
+ source.resolve("GeoLite2-City.mmdb"),
+ StandardCopyOption.REPLACE_EXISTING
+ );
Files.copy(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/GeoLite2-Country.mmdb"),
- source.resolve("GeoLite2-Country.mmdb")
+ source.resolve("GeoLite2-Country.mmdb"),
+ StandardCopyOption.REPLACE_EXISTING
);
Files.copy(
GeoIpHttpFixture.class.getResourceAsStream("/geoip-fixture/MyCustomGeoLite2-City.mmdb"),
- source.resolve("MyCustomGeoLite2-City.mmdb")
+ source.resolve("MyCustomGeoLite2-City.mmdb"),
+ StandardCopyOption.REPLACE_EXISTING
);
new GeoIpCli().main(
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java
index a84b9cb2f52f2..2b33e18d83bfb 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryAction.java
@@ -13,6 +13,7 @@
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
@@ -23,7 +24,7 @@ public class DeleteInternalCcrRepositoryAction extends ActionType ActionResponse.Empty.INSTANCE);
+ super(NAME, Writeable.Reader.localOnly());
}
public static class TransportDeleteInternalRepositoryAction extends TransportAction<
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java
index 23cf2b64d4118..bc7ca12a49e22 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/DeleteInternalCcrRepositoryRequest.java
@@ -9,6 +9,7 @@
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
@@ -29,7 +30,7 @@ public ActionRequestValidationException validate() {
@Override
public void writeTo(StreamOutput out) throws IOException {
- throw new UnsupportedOperationException("DeleteInternalRepositoryRequest cannot be serialized for sending across the wire.");
+ TransportAction.localOnly();
}
public String getName() {
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java
index d8e323583c4de..68c3ff97e26fe 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryAction.java
@@ -13,6 +13,7 @@
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
@@ -23,7 +24,7 @@ public class PutInternalCcrRepositoryAction extends ActionType ActionResponse.Empty.INSTANCE);
+ super(NAME, Writeable.Reader.localOnly());
}
public static class TransportPutInternalRepositoryAction extends TransportAction<
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java
index 152555073ef00..6d92062035d76 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutInternalCcrRepositoryRequest.java
@@ -9,6 +9,7 @@
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
@@ -31,7 +32,7 @@ public ActionRequestValidationException validate() {
@Override
public void writeTo(StreamOutput out) throws IOException {
- throw new UnsupportedOperationException("PutInternalRepositoryRequest cannot be serialized for sending across the wire.");
+ TransportAction.localOnly();
}
public String getName() {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java
index 2751a6f60d3e6..6d693b8ad8fdb 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java
@@ -46,7 +46,7 @@ public IndicesAccessControl(boolean granted, Map ind
public IndicesAccessControl(boolean granted, Supplier
*/
- public final void encodeNextBatch() {
- bytes.clear();
- firstPosition += positionCount;
- positionCount = 0;
- currentValue = 0;
- readNextBatch();
- }
+ public abstract void encodeNextBatch();
- @Override
- public long ramBytesUsed() {
- return SHALLOW_SIZE + RamUsageEstimator.sizeOf(counts) + RamUsageEstimator.sizeOf(valueOffsets);
- }
+ protected abstract static class MVEncoder extends BatchEncoder {
+ private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(MVEncoder.class);
- /**
- * Encodes the next batch of values. See {@link #encodeNextBatch()}.
- */
- protected abstract void readNextBatch();
+ /**
+ * Buffer into which we encode values.
+ */
+ protected final BytesRefBuilder bytes = new BytesRefBuilder();
- /**
- * Implementations of {@link #readNextBatch} should call this before any
- * values at the current position.
- */
- protected final void startPosition() {
- counts = ArrayUtil.grow(counts, positionCount + 1);
- counts[positionCount] = 0;
- }
+ /**
+ * Count of values at each position.
+ */
+ private int[] counts = new int[ArrayUtil.oversize(10, Integer.BYTES)];
- /**
- * Implementations of {@link #readNextBatch} should call this before adding
- * each value to the current position to mark its start.
- */
- protected final void addingValue() {
- counts[positionCount]++;
- valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1);
- valueOffsets[currentValue++] = bytes.length();
- }
+ /**
+ * Offsets into the {@link #bytes} for each value.
+ */
+ private int[] valueOffsets = new int[ArrayUtil.oversize(10, Integer.BYTES)];
- /**
- * Implementations of {@link #readNextBatch} should call this to end
- * the current position.
- */
- protected final void endPosition() {
- valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1);
- valueOffsets[currentValue] = bytes.length();
- positionCount++;
- }
+ /**
+ * The first position in the current batch.
+ */
+ private int firstPosition;
- /**
- * Implementations of {@link #readNextBatch} should call this to encode
- * an entirely null position.
- */
- protected final void encodeNull() {
- startPosition();
- addingValue();
- endPosition();
- }
+ /**
+ * The number of positions in the current batch. It's also the maximum index into
+     * {@link #counts} that has a meaning.
+ */
+ private int positionCount;
- /**
- * The number of bytes in all entries in the batch.
- */
- final int bytesLength() {
- return bytes.length();
+ /**
+ * The value being encoded right now.
+ */
+ private int currentValue;
+
+ /**
+ * Build the encoder.
+ * @param batchSize The number of bytes in a batch. We'll allocate this much memory for the
+ * encoder and only expand the allocation if the first entry in a batch
+ * doesn't fit into the buffer.
+ */
+ MVEncoder(int batchSize) {
+ bytes.grow(batchSize);
+ }
+
+ /**
+ * The first position in the current batch.
+ */
+ protected final int firstPosition() {
+ return firstPosition;
+ }
+
+ /**
+ * The number of positions in the current batch.
+ */
+ @Override
+ public final int positionCount() {
+ return positionCount;
+ }
+
+ /**
+ * The number of values at the position with this offset in the batch.
+ * The actual position in the block we're encoding is {@code positionOffset + firstPosition()}.
+ */
+ @Override
+ public final int valueCount(int positionOffset) {
+ if (positionOffset >= positionCount) {
+ throw new IllegalArgumentException("wanted " + positionOffset + " but only have " + positionCount);
+ }
+ return counts[positionOffset];
+ }
+
+ /**
+ * Read the value at the specified index. Values at the first position
+ * start at index {@code 0} and advance one per value. So the values
+ * at position n start at {@code (0..n-1).sum(valueCount)}. There is
+ * no random-access way to get the first index for a position.
+ */
+ @Override
+ public final int read(int index, BytesRefBuilder dst) {
+ int start = valueOffsets[index];
+ int length = valueOffsets[index + 1] - start;
+ if (length > 0) {
+ dst.append(bytes.bytes(), start, length);
+ }
+ return length;
+ }
+
+ /**
+ * Encodes the next batch of entries. This will encode values until the next
+ * value doesn't fit into the buffer. Callers should iterate on the values
+ * that have been encoded and then call this again for the next batch.
+ *
+ * It's possible for this batch to be empty if there isn't room for the
+ * first entry in the buffer. If so, call again to force the buffer to
+ * expand and encode that entry.
+ *
+ */
+ @Override
+ public final void encodeNextBatch() {
+ bytes.clear();
+ firstPosition += positionCount;
+ positionCount = 0;
+ currentValue = 0;
+ readNextBatch();
+ }
+
+ @Override
+ public long ramBytesUsed() {
+ return SHALLOW_SIZE + RamUsageEstimator.sizeOf(counts) + RamUsageEstimator.sizeOf(valueOffsets);
+ }
+
+ /**
+ * Encodes the next batch of values. See {@link #encodeNextBatch()}.
+ */
+ protected abstract void readNextBatch();
+
+ /**
+ * Implementations of {@link #readNextBatch} should call this before any
+ * values at the current position.
+ */
+ protected final void startPosition() {
+ counts = ArrayUtil.grow(counts, positionCount + 1);
+ counts[positionCount] = 0;
+ }
+
+ /**
+ * Implementations of {@link #readNextBatch} should call this before adding
+ * each value to the current position to mark its start.
+ */
+ protected final void addingValue() {
+ counts[positionCount]++;
+ valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1);
+ valueOffsets[currentValue++] = bytes.length();
+ }
+
+ /**
+ * Implementations of {@link #readNextBatch} should call this to end
+ * the current position.
+ */
+ protected final void endPosition() {
+ valueOffsets = ArrayUtil.grow(valueOffsets, currentValue + 1);
+ valueOffsets[currentValue] = bytes.length();
+ positionCount++;
+ }
+
+ /**
+ * Implementations of {@link #readNextBatch} should call this to encode
+ * an entirely null position.
+ */
+ protected final void encodeNull() {
+ startPosition();
+ addingValue();
+ endPosition();
+ }
+
+ /**
+ * The number of bytes in all entries in the batch.
+ */
+ final int bytesLength() {
+ return bytes.length();
+ }
+
+ /**
+ * The maximum batch size. This starts the same as the constructor parameter
+ * but will grow if a single entry doesn't fit into the batch.
+ */
+ final int bytesCapacity() {
+ return bytes.bytes().length;
+ }
}
- /**
- * The maximum batch size. This starts the same as the constructor parameter
- * but will grow if a single entry doesn't fit into the batch.
- */
- final int bytesCapacity() {
- return bytes.bytes().length;
+ protected abstract static class DirectEncoder extends BatchEncoder {
+ protected static final long BASE_RAM_USAGE = RamUsageEstimator.shallowSizeOfInstance(DirectEncoder.class);
+ protected final Block block;
+ private int blockPosition = -1;
+ private int valueCount;
+
+ DirectEncoder(Block block) {
+ this.block = block;
+ }
+
+ @Override
+ public final void encodeNextBatch() {
+ valueCount = block.getValueCount(++blockPosition);
+ }
+
+ @Override
+ public final int positionCount() {
+ return Math.max(valueCount, 1);
+ }
+
+ @Override
+ public final int valueCount(int positionOffset) {
+ assert positionOffset == 0 : positionOffset;
+ return positionCount();
+ }
+
+ @Override
+ public int read(int index, BytesRefBuilder dst) {
+ if (valueCount == 0) {
+ assert index == 0 : index;
+ return 0;
+ } else {
+ assert index < valueCount : index + " > " + valueCount;
+ return readValueAtBlockIndex(block.getFirstValueIndex(blockPosition) + index, dst);
+ }
+ }
+
+ protected abstract int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst);
+
+ @Override
+ public final long ramBytesUsed() {
+ return BASE_RAM_USAGE;
+ }
}
private static final VarHandle intHandle = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.nativeOrder());
- protected abstract static class Ints extends BatchEncoder {
+ protected abstract static class Ints extends MVEncoder {
protected Ints(int batchSize) {
super(batchSize);
}
@@ -267,9 +352,26 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int
}
}
+ protected static final class DirectInts extends DirectEncoder {
+ DirectInts(IntBlock block) {
+ super(block);
+ }
+
+ @Override
+ protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) {
+ int before = dst.length();
+ int after = before + Integer.BYTES;
+ dst.grow(after);
+ int v = ((IntBlock) block).getInt(valueIndex);
+ intHandle.set(dst.bytes(), before, v);
+ dst.setLength(after);
+ return Integer.BYTES;
+ }
+ }
+
private static final VarHandle longHandle = MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.nativeOrder());
- protected abstract static class Longs extends BatchEncoder {
+ protected abstract static class Longs extends MVEncoder {
protected Longs(int batchSize) {
super(batchSize);
}
@@ -300,6 +402,23 @@ protected final void encode(long v) {
}
}
+ protected static final class DirectLongs extends DirectEncoder {
+ DirectLongs(LongBlock block) {
+ super(block);
+ }
+
+ @Override
+ protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) {
+ int before = dst.length();
+ int after = before + Long.BYTES;
+ dst.grow(after);
+ long v = ((LongBlock) block).getLong(valueIndex);
+ longHandle.set(dst.bytes(), before, v);
+ dst.setLength(after);
+ return Long.BYTES;
+ }
+ }
+
private static class LongsDecoder implements Decoder {
@Override
public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) {
@@ -320,7 +439,7 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int
private static final VarHandle doubleHandle = MethodHandles.byteArrayViewVarHandle(double[].class, ByteOrder.nativeOrder());
- protected abstract static class Doubles extends BatchEncoder {
+ protected abstract static class Doubles extends MVEncoder {
protected Doubles(int batchSize) {
super(batchSize);
}
@@ -351,6 +470,23 @@ protected final void encode(double v) {
}
}
+ protected static final class DirectDoubles extends DirectEncoder {
+ DirectDoubles(DoubleBlock block) {
+ super(block);
+ }
+
+ @Override
+ protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) {
+ int before = dst.length();
+ int after = before + Double.BYTES;
+ dst.grow(after);
+ double v = ((DoubleBlock) block).getDouble(valueIndex);
+ doubleHandle.set(dst.bytes(), before, v);
+ dst.setLength(after);
+ return Double.BYTES;
+ }
+ }
+
private static class DoublesDecoder implements Decoder {
@Override
public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) {
@@ -368,7 +504,7 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int
}
}
- protected abstract static class Booleans extends BatchEncoder {
+ protected abstract static class Booleans extends MVEncoder {
protected Booleans(int batchSize) {
super(batchSize);
}
@@ -396,6 +532,19 @@ protected final void encode(boolean v) {
}
}
+ protected static final class DirectBooleans extends DirectEncoder {
+ DirectBooleans(BooleanBlock block) {
+ super(block);
+ }
+
+ @Override
+ protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) {
+ var v = ((BooleanBlock) block).getBoolean(valueIndex);
+ dst.append((byte) (v ? 1 : 0));
+ return 1;
+ }
+ }
+
private static class BooleansDecoder implements Decoder {
@Override
public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) {
@@ -413,7 +562,7 @@ public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int
}
}
- protected abstract static class BytesRefs extends BatchEncoder {
+ protected abstract static class BytesRefs extends MVEncoder {
protected BytesRefs(int batchSize) {
super(batchSize);
}
@@ -448,6 +597,25 @@ protected final void encode(BytesRef v) {
}
}
+ protected static final class DirectBytesRefs extends DirectEncoder {
+ private final BytesRef scratch = new BytesRef();
+
+ DirectBytesRefs(BytesRefBlock block) {
+ super(block);
+ }
+
+ @Override
+ protected int readValueAtBlockIndex(int valueIndex, BytesRefBuilder dst) {
+ var v = ((BytesRefBlock) block).getBytesRef(valueIndex, scratch);
+ int start = dst.length();
+ dst.grow(start + Integer.BYTES + v.length);
+ intHandle.set(dst.bytes(), start, v.length);
+ dst.setLength(start + Integer.BYTES);
+ dst.append(v);
+ return Integer.BYTES + v.length;
+ }
+ }
+
private static class BytesRefsDecoder implements Decoder {
@Override
public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) {
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java
index ba730c1947c2e..902fa90a502ca 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java
@@ -8,8 +8,12 @@
package org.elasticsearch.compute.operator;
import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.BytesRefBlock;
+import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntBlock;
+import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
@@ -124,17 +128,28 @@ public record HashResult(IntBlock ords, boolean sawNull) {}
* and then encodes the results into a {@link byte[]} which can be used for
* things like hashing many fields together.
*/
- public static BatchEncoder batchEncoder(Block.Ref ref, int batchSize) {
- // TODO collect single-valued block handling here. And maybe vector. And maybe all null?
- // TODO check for for unique multivalued fields and for ascending multivalue fields.
- return switch (ref.block().elementType()) {
- case BOOLEAN -> new MultivalueDedupeBoolean(ref).batchEncoder(batchSize);
- case BYTES_REF -> new MultivalueDedupeBytesRef(ref).batchEncoder(batchSize);
- case INT -> new MultivalueDedupeInt(ref).batchEncoder(batchSize);
- case LONG -> new MultivalueDedupeLong(ref).batchEncoder(batchSize);
- case DOUBLE -> new MultivalueDedupeDouble(ref).batchEncoder(batchSize);
- default -> throw new IllegalArgumentException();
- };
+ public static BatchEncoder batchEncoder(Block.Ref ref, int batchSize, boolean allowDirectEncoder) {
+ var elementType = ref.block().elementType();
+ if (allowDirectEncoder && ref.block().mvDeduplicated()) {
+ var block = ref.block();
+ return switch (elementType) {
+ case BOOLEAN -> new BatchEncoder.DirectBooleans((BooleanBlock) block);
+ case BYTES_REF -> new BatchEncoder.DirectBytesRefs((BytesRefBlock) block);
+ case INT -> new BatchEncoder.DirectInts((IntBlock) block);
+ case LONG -> new BatchEncoder.DirectLongs((LongBlock) block);
+ case DOUBLE -> new BatchEncoder.DirectDoubles((DoubleBlock) block);
+ default -> throw new IllegalArgumentException("Unknown [" + elementType + "]");
+ };
+ } else {
+ return switch (elementType) {
+ case BOOLEAN -> new MultivalueDedupeBoolean(ref).batchEncoder(batchSize);
+ case BYTES_REF -> new MultivalueDedupeBytesRef(ref).batchEncoder(batchSize);
+ case INT -> new MultivalueDedupeInt(ref).batchEncoder(batchSize);
+ case LONG -> new MultivalueDedupeLong(ref).batchEncoder(batchSize);
+ case DOUBLE -> new MultivalueDedupeDouble(ref).batchEncoder(batchSize);
+ default -> throw new IllegalArgumentException();
+ };
+ }
}
private abstract static class MvDedupeEvaluator implements EvalOperator.ExpressionEvaluator {
diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java
index 996561121df8f..8f320815f995e 100644
--- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java
+++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java
@@ -27,7 +27,6 @@
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.IntVector;
import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.lucene.BlockOrdinalsReader;
import org.elasticsearch.compute.lucene.ValueSourceInfo;
import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
import org.elasticsearch.compute.operator.HashAggregationOperator.GroupSpec;
@@ -234,18 +233,31 @@ protected boolean lessThan(AggregatedResultIterator a, AggregatedResultIterator
};
final List aggregators = createGroupingAggregators();
try {
+ boolean seenNulls = false;
+ for (OrdinalSegmentAggregator agg : ordinalAggregators.values()) {
+ if (agg.seenNulls()) {
+ seenNulls = true;
+ for (int i = 0; i < aggregators.size(); i++) {
+ aggregators.get(i).addIntermediateRow(0, agg.aggregators.get(i), 0);
+ }
+ }
+ }
for (OrdinalSegmentAggregator agg : ordinalAggregators.values()) {
final AggregatedResultIterator it = agg.getResultIterator();
if (it.next()) {
pq.add(it);
}
}
- int position = -1;
+ final int startPosition = seenNulls ? 0 : -1;
+ int position = startPosition;
final BytesRefBuilder lastTerm = new BytesRefBuilder();
var blockBuilder = BytesRefBlock.newBlockBuilder(1);
+ if (seenNulls) {
+ blockBuilder.appendNull();
+ }
while (pq.size() > 0) {
final AggregatedResultIterator top = pq.top();
- if (position == -1 || lastTerm.get().equals(top.currentTerm) == false) {
+ if (position == startPosition || lastTerm.get().equals(top.currentTerm) == false) {
position++;
lastTerm.copyBytes(top.currentTerm);
blockBuilder.appendBytesRef(top.currentTerm);
@@ -338,11 +350,8 @@ void addInput(IntVector docs, Page page) {
if (BlockOrdinalsReader.canReuse(currentReader, docs.getInt(0)) == false) {
currentReader = new BlockOrdinalsReader(withOrdinals.ordinalsValues(leafReaderContext));
}
- final IntBlock ordinals = currentReader.readOrdinals(docs);
+ final IntBlock ordinals = currentReader.readOrdinalsAdded1(docs);
for (int p = 0; p < ordinals.getPositionCount(); p++) {
- if (ordinals.isNull(p)) {
- continue;
- }
int start = ordinals.getFirstValueIndex(p);
int end = start + ordinals.getValueCount(p);
for (int i = start; i < end; i++) {
@@ -350,8 +359,8 @@ void addInput(IntVector docs, Page page) {
visitedOrds.set(ord);
}
}
- for (GroupingAggregator aggregator : aggregators) {
- aggregator.prepareProcessPage(this, page).add(0, ordinals);
+ for (GroupingAggregatorFunction.AddInput addInput : prepared) {
+ addInput.add(0, ordinals);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
@@ -362,6 +371,10 @@ AggregatedResultIterator getResultIterator() throws IOException {
return new AggregatedResultIterator(aggregators, visitedOrds, withOrdinals.ordinalsValues(leafReaderContext));
}
+ boolean seenNulls() {
+ return visitedOrds.get(0);
+ }
+
@Override
public BitArray seenGroupIds(BigArrays bigArrays) {
BitArray seen = new BitArray(0, bigArrays);
@@ -377,7 +390,7 @@ public void close() {
private static class AggregatedResultIterator {
private BytesRef currentTerm;
- private long currentOrd = -1;
+ private long currentOrd = 0;
private final List aggregators;
private final BitArray ords;
private final SortedSetDocValues dv;
@@ -395,8 +408,9 @@ int currentPosition() {
boolean next() throws IOException {
currentOrd = ords.nextSetBit(currentOrd + 1);
+ assert currentOrd > 0 : currentOrd;
if (currentOrd < Long.MAX_VALUE) {
- currentTerm = dv.lookupOrd(currentOrd);
+ currentTerm = dv.lookupOrd(currentOrd - 1);
return true;
} else {
currentTerm = null;
@@ -448,4 +462,49 @@ public void close() {
Releasables.close(extractor, aggregator);
}
}
+
+ static final class BlockOrdinalsReader {
+ private final SortedSetDocValues sortedSetDocValues;
+ private final Thread creationThread;
+
+ BlockOrdinalsReader(SortedSetDocValues sortedSetDocValues) {
+ this.sortedSetDocValues = sortedSetDocValues;
+ this.creationThread = Thread.currentThread();
+ }
+
+ IntBlock readOrdinalsAdded1(IntVector docs) throws IOException {
+ final int positionCount = docs.getPositionCount();
+ IntBlock.Builder builder = IntBlock.newBlockBuilder(positionCount);
+ for (int p = 0; p < positionCount; p++) {
+ int doc = docs.getInt(p);
+ if (false == sortedSetDocValues.advanceExact(doc)) {
+ builder.appendInt(0);
+ continue;
+ }
+ int count = sortedSetDocValues.docValueCount();
+ // TODO don't come this way if there are a zillion ords on the field
+ if (count == 1) {
+ builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd() + 1));
+ continue;
+ }
+ builder.beginPositionEntry();
+ for (int i = 0; i < count; i++) {
+ builder.appendInt(Math.toIntExact(sortedSetDocValues.nextOrd() + 1));
+ }
+ builder.endPositionEntry();
+ }
+ return builder.build();
+ }
+
+ int docID() {
+ return sortedSetDocValues.docID();
+ }
+
+ /**
+ * Checks if the reader can be used to read a range documents starting with the given docID by the current thread.
+ */
+ static boolean canReuse(BlockOrdinalsReader reader, int startingDocID) {
+ return reader != null && reader.creationThread == Thread.currentThread() && reader.docID() <= startingDocID;
+ }
+ }
}
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java
deleted file mode 100644
index 8fa38b6864674..0000000000000
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/TestBlockFactoryParameters.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.compute;
-
-import org.elasticsearch.common.breaker.CircuitBreaker;
-import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.common.util.MockBigArrays;
-import org.elasticsearch.common.util.PageCacheRecycler;
-import org.elasticsearch.compute.data.BlockFactoryParameters;
-import org.elasticsearch.indices.breaker.CircuitBreakerService;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-public class TestBlockFactoryParameters implements BlockFactoryParameters {
-
- final CircuitBreaker breaker;
- final BigArrays bigArrays;
-
- public TestBlockFactoryParameters() {
- breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1));
- var breakerService = mock(CircuitBreakerService.class);
- when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(breaker);
- bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, breakerService);
- }
-
- @Override
- public CircuitBreaker breaker() {
- return breaker;
- }
-
- @Override
- public BigArrays bigArrays() {
- return bigArrays;
- }
-}
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java
index 7be79e73b5d9d..9c6c9d966b3f6 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java
@@ -61,16 +61,6 @@ public BlockFactory get() {
public String toString() {
return "1gb";
}
- }, new Supplier<>() {
- @Override
- public BlockFactory get() {
- return BlockFactory.getGlobalInstance();
- }
-
- @Override
- public String toString() {
- return "global";
- }
});
return l.stream().map(s -> new Object[] { s }).toList();
}
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
new file mode 100644
index 0000000000000..9893cd2b2a023
--- /dev/null
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.lucene;
+
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.AnyOperatorTestCase;
+import org.elasticsearch.compute.operator.Driver;
+import org.elasticsearch.compute.operator.DriverContext;
+import org.elasticsearch.compute.operator.OperatorTestCase;
+import org.elasticsearch.compute.operator.PageConsumerOperator;
+import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy;
+import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.search.internal.ContextIndexSearcher;
+import org.elasticsearch.search.internal.SearchContext;
+import org.junit.After;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Function;
+
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class LuceneCountOperatorTests extends AnyOperatorTestCase {
+ private Directory directory = newDirectory();
+ private IndexReader reader;
+
+ @After
+ public void closeIndex() throws IOException {
+ IOUtils.close(reader, directory);
+ }
+
+ @Override
+ protected LuceneCountOperator.Factory simple(BigArrays bigArrays) {
+ return simple(bigArrays, randomFrom(DataPartitioning.values()), between(1, 10_000), 100);
+ }
+
+ private LuceneCountOperator.Factory simple(BigArrays bigArrays, DataPartitioning dataPartitioning, int numDocs, int limit) {
+ int commitEvery = Math.max(1, numDocs / 10);
+ try (
+ RandomIndexWriter writer = new RandomIndexWriter(
+ random(),
+ directory,
+ newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE)
+ )
+ ) {
+ for (int d = 0; d < numDocs; d++) {
+ List doc = new ArrayList<>();
+ doc.add(new SortedNumericDocValuesField("s", d));
+ writer.addDocument(doc);
+ if (d % commitEvery == 0) {
+ writer.commit();
+ }
+ }
+ reader = writer.getReader();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ SearchContext ctx = mockSearchContext(reader);
+ SearchExecutionContext ectx = mock(SearchExecutionContext.class);
+ when(ctx.getSearchExecutionContext()).thenReturn(ectx);
+ when(ectx.getIndexReader()).thenReturn(reader);
+ Function queryFunction = c -> new MatchAllDocsQuery();
+ return new LuceneCountOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, limit);
+ }
+
+ @Override
+ protected String expectedToStringOfSimple() {
+ assumeFalse("can't support variable maxPageSize", true); // TODO allow testing this
+ return "LuceneCountOperator[shardId=0, maxPageSize=**random**]";
+ }
+
+ @Override
+ protected String expectedDescriptionOfSimple() {
+ assumeFalse("can't support variable maxPageSize", true); // TODO allow testing this
+ return """
+ LuceneCountOperator[dataPartitioning = SHARD, maxPageSize = **random**, limit = 100, sorts = [{"s":{"order":"asc"}}]]""";
+ }
+
+ // TODO tests for the other data partitioning configurations
+
+ public void testShardDataPartitioning() {
+ int size = between(1_000, 20_000);
+ int limit = between(10, size);
+ testCount(size, limit);
+ }
+
+ public void testEmpty() {
+ testCount(0, between(10, 10_000));
+ }
+
+ private void testCount(int size, int limit) {
+ DriverContext ctx = driverContext();
+ LuceneCountOperator.Factory factory = simple(nonBreakingBigArrays(), DataPartitioning.SHARD, size, limit);
+
+ List results = new ArrayList<>();
+ OperatorTestCase.runDriver(new Driver(ctx, factory.get(ctx), List.of(), new PageConsumerOperator(results::add), () -> {}));
+ OperatorTestCase.assertDriverContext(ctx);
+
+ assertThat(results, hasSize(1));
+ Page page = results.get(0);
+
+ assertThat(page.getPositionCount(), is(1));
+ assertThat(page.getBlockCount(), is(2));
+ LongBlock lb = page.getBlock(0);
+ assertThat(lb.getPositionCount(), is(1));
+ assertThat(lb.getLong(0), is((long) Math.min(size, limit)));
+ BooleanBlock bb = page.getBlock(1);
+ assertThat(bb.getBoolean(0), is(true));
+ }
+
+ /**
+ * Creates a mock search context with the given index reader.
+ * The returned mock search context can be used to test with {@link LuceneOperator}.
+ */
+ public static SearchContext mockSearchContext(IndexReader reader) {
+ try {
+ ContextIndexSearcher searcher = new ContextIndexSearcher(
+ reader,
+ IndexSearcher.getDefaultSimilarity(),
+ IndexSearcher.getDefaultQueryCache(),
+ TrivialQueryCachingPolicy.NEVER,
+ true
+ );
+ SearchContext searchContext = mock(SearchContext.class);
+ when(searchContext.searcher()).thenReturn(searcher);
+ return searchContext;
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+}
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java
index 57ea313b88dab..7a77d6bbb082c 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java
@@ -26,6 +26,10 @@ public static List collectPages(SourceOperator source) {
if (in == null) {
continue;
}
+ if (in.getPositionCount() == 0) {
+ in.releaseBlocks();
+ continue;
+ }
pages.add(in);
}
return pages;
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java
index b8d19d34b53be..370412714157a 100644
--- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java
+++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java
@@ -10,6 +10,7 @@
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BytesRefHash;
@@ -196,7 +197,7 @@ public void testHashWithPreviousValues() {
public void testBatchEncodeAll() {
int initCapacity = Math.toIntExact(ByteSizeValue.ofKb(10).getBytes());
BasicBlockTests.RandomBlock b = randomBlock();
- BatchEncoder encoder = MultivalueDedupe.batchEncoder(Block.Ref.floating(b.block()), initCapacity);
+ var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(Block.Ref.floating(b.block()), initCapacity, false);
int valueOffset = 0;
for (int p = 0, positionOffset = Integer.MAX_VALUE; p < b.block().getPositionCount(); p++, positionOffset++) {
@@ -213,7 +214,7 @@ public void testBatchEncodeAll() {
public void testBatchEncoderStartSmall() {
assumeFalse("Booleans don't grow in the same way", elementType == ElementType.BOOLEAN);
BasicBlockTests.RandomBlock b = randomBlock();
- BatchEncoder encoder = MultivalueDedupe.batchEncoder(Block.Ref.floating(b.block()), 0);
+ var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(Block.Ref.floating(b.block()), 0, false);
/*
* We run can't fit the first non-null position into our 0 bytes.
@@ -346,7 +347,9 @@ private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder en
Block.Builder builder = elementType.newBlockBuilder(encoder.valueCount(offset));
BytesRef[] toDecode = new BytesRef[encoder.valueCount(offset)];
for (int i = 0; i < toDecode.length; i++) {
- toDecode[i] = encoder.read(valueOffset++, new BytesRef());
+ BytesRefBuilder dest = new BytesRefBuilder();
+ encoder.read(valueOffset++, dest);
+ toDecode[i] = dest.toBytesRef();
if (b.values().get(position) == null) {
// Nulls are encoded as 0 length values
assertThat(toDecode[i].length, equalTo(0));
diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
index 4fc89e11b7b6b..776a2e732e5e9 100644
--- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
+++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java
@@ -32,6 +32,7 @@
import static org.elasticsearch.test.MapMatcher.matchesMap;
import static org.elasticsearch.xpack.esql.CsvAssert.assertData;
import static org.elasticsearch.xpack.esql.CsvAssert.assertMetadata;
+import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults;
import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues;
import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP;
@@ -83,6 +84,10 @@ public static void wipeTestData() throws IOException {
}
}
+ public boolean logResults() {
+ return false;
+ }
+
public final void test() throws Throwable {
try {
assumeTrue("Test " + testName + " is not enabled", isEnabled(testName));
@@ -97,21 +102,29 @@ protected final void doTest() throws Throwable {
Map answer = runEsql(builder.query(testCase.query).build(), testCase.expectedWarnings);
var expectedColumnsWithValues = loadCsvSpecValues(testCase.expectedResults);
- assertNotNull(answer.get("columns"));
+ var metadata = answer.get("columns");
+ assertNotNull(metadata);
@SuppressWarnings("unchecked")
- var actualColumns = (List>) answer.get("columns");
- assertMetadata(expectedColumnsWithValues, actualColumns, LOGGER);
+ var actualColumns = (List>) metadata;
- assertNotNull(answer.get("values"));
+ Logger logger = logResults() ? LOGGER : null;
+ var values = answer.get("values");
+ assertNotNull(values);
@SuppressWarnings("unchecked")
- List> actualValues = (List>) answer.get("values");
- assertData(
- expectedColumnsWithValues,
- actualValues,
- testCase.ignoreOrder,
- LOGGER,
- value -> value == null ? "null" : value.toString()
- );
+ List> actualValues = (List>) values;
+
+ assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger);
+ }
+
+ protected void assertResults(
+ ExpectedResults expected,
+ List> actualColumns,
+ List> actualValues,
+ boolean ignoreOrder,
+ Logger logger
+ ) {
+ assertMetadata(expected, actualColumns, logger);
+ assertData(expected, actualValues, testCase.ignoreOrder, logger, value -> value == null ? "null" : value.toString());
}
private Throwable reworkException(Throwable th) {
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java
index b58a7770eef10..2bfe366d9f01e 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java
@@ -415,7 +415,7 @@ Iterator> values() {
}
}
- static void logMetaData(List actualColumnNames, List actualColumnTypes, Logger logger) {
+ public static void logMetaData(List actualColumnNames, List actualColumnTypes, Logger logger) {
// header
StringBuilder sb = new StringBuilder();
StringBuilder column = new StringBuilder();
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec
index 55016a4cd2dc2..6405c082cf784 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec
@@ -533,3 +533,48 @@ c:l
;
+
+countAllGrouped
+from employees | stats c = count(*) by languages | rename languages as l | sort l DESC ;
+
+c:l | l:i
+10 |null
+21 |5
+18 |4
+17 |3
+19 |2
+15 |1
+;
+
+countAllAndOtherStatGrouped
+from employees | stats c = count(*), min = min(emp_no) by languages | sort languages;
+
+c:l | min:i | languages:i
+15 | 10005 | 1
+19 | 10001 | 2
+17 | 10006 | 3
+18 | 10003 | 4
+21 | 10002 | 5
+10 | 10020 | null
+;
+
+countAllWithEval
+from employees | rename languages as l | stats min = min(salary) by l | eval x = min + 1 | stats ca = count(*), cx = count(x) by l | sort l;
+
+ca:l | cx:l | l:i
+1 | 1 | 1
+1 | 1 | 2
+1 | 1 | 3
+1 | 1 | 4
+1 | 1 | 5
+1 | 1 | null
+;
+
+aggsWithoutStats
+from employees | stats by gender | sort gender;
+
+gender:keyword
+F
+M
+null
+;
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
index 800e36949f5ea..714d80be531db 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java
@@ -10,7 +10,6 @@
import org.elasticsearch.Build;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.support.WriteRequest;
@@ -34,6 +33,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
@@ -265,7 +265,7 @@ public void testFromStatsGroupingByKeywordWithNulls() {
EsqlQueryResponse results = run("from test | stats avg = avg(" + field + ") by color");
logger.info(results);
Assert.assertEquals(2, results.columns().size());
- Assert.assertEquals(4, getValuesList(results).size());
+ Assert.assertEquals(5, getValuesList(results).size());
// assert column metadata
assertEquals("avg", results.columns().get(0).name());
@@ -276,6 +276,7 @@ record Group(String color, Double avg) {
}
List expectedGroups = List.of(
+ new Group(null, 120.0),
new Group("blue", 42.0),
new Group("green", 44.0),
new Group("red", 43.0),
@@ -283,18 +284,10 @@ record Group(String color, Double avg) {
);
List actualGroups = getValuesList(results).stream()
.map(l -> new Group((String) l.get(1), (Double) l.get(0)))
- .sorted(comparing(c -> c.color))
+ .sorted(Comparator.comparing(c -> c.color, Comparator.nullsFirst(String::compareTo)))
.toList();
assertThat(actualGroups, equalTo(expectedGroups));
}
- for (int i = 0; i < 5; i++) {
- client().prepareBulk()
- .add(new DeleteRequest("test").id("no_color_" + i))
- .add(new DeleteRequest("test").id("no_count_red_" + i))
- .add(new DeleteRequest("test").id("no_count_yellow_" + i))
- .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
- .get();
- }
}
public void testFromStatsMultipleAggs() {
@@ -448,7 +441,48 @@ public void testFromEvalStats() {
assertEquals(0.034d, (double) getValuesList(results).get(0).get(0), 0.001d);
}
- public void testFromStatsThenEval() {
+ public void testUngroupedCountAll() {
+ EsqlQueryResponse results = run("from test | stats count(*)");
+ logger.info(results);
+ Assert.assertEquals(1, results.columns().size());
+ Assert.assertEquals(1, getValuesList(results).size());
+ assertEquals("count(*)", results.columns().get(0).name());
+ assertEquals("long", results.columns().get(0).type());
+ var values = getValuesList(results).get(0);
+ assertEquals(1, values.size());
+ assertEquals(40, (long) values.get(0));
+ }
+
+ public void testUngroupedCountAllWithFilter() {
+ EsqlQueryResponse results = run("from test | where data > 1 | stats count(*)");
+ logger.info(results);
+ Assert.assertEquals(1, results.columns().size());
+ Assert.assertEquals(1, getValuesList(results).size());
+ assertEquals("count(*)", results.columns().get(0).name());
+ assertEquals("long", results.columns().get(0).type());
+ var values = getValuesList(results).get(0);
+ assertEquals(1, values.size());
+ assertEquals(20, (long) values.get(0));
+ }
+
+ @AwaitsFix(bugUrl = "tracking down a 64b(long) memory leak")
+ public void testGroupedCountAllWithFilter() {
+ EsqlQueryResponse results = run("from test | where data > 1 | stats count(*) by data | sort data");
+ logger.info(results);
+ Assert.assertEquals(2, results.columns().size());
+ Assert.assertEquals(1, getValuesList(results).size());
+ assertEquals("count(*)", results.columns().get(0).name());
+ assertEquals("long", results.columns().get(0).type());
+ assertEquals("data", results.columns().get(1).name());
+ assertEquals("long", results.columns().get(1).type());
+ var values = getValuesList(results).get(0);
+ assertEquals(2, values.size());
+ assertEquals(20, (long) values.get(0));
+ assertEquals(2L, (long) values.get(1));
+ }
+
+ public void testFromStatsEvalWithPragma() {
+ assumeTrue("pragmas only enabled on snapshot builds", Build.current().isSnapshot());
EsqlQueryResponse results = run("from test | stats avg_count = avg(count) | eval x = avg_count + 7");
logger.info(results);
Assert.assertEquals(1, getValuesList(results).size());
@@ -521,11 +555,6 @@ public void testFilterWithNullAndEvalFromIndex() {
assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("data", "long"))));
assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("data_d", "double"))));
assertThat(results.columns(), hasItem(equalTo(new ColumnInfo("time", "long"))));
-
- // restore index to original pre-test state
- client().prepareBulk().add(new DeleteRequest("test").id("no_count")).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
- results = run("from test");
- Assert.assertEquals(40, getValuesList(results).size());
}
public void testMultiConditionalWhere() {
@@ -922,9 +951,6 @@ public void testInWithNullValue() {
}
public void testTopNPushedToLucene() {
- BulkRequestBuilder bulkDelete = client().prepareBulk();
- bulkDelete.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-
for (int i = 5; i < 11; i++) {
var yellowDocId = "yellow_" + i;
var yellowNullCountDocId = "yellow_null_count_" + i;
@@ -938,11 +964,6 @@ public void testTopNPushedToLucene() {
if (randomBoolean()) {
client().admin().indices().prepareRefresh("test").get();
}
-
- // build the cleanup request now, as well, not to miss anything ;-)
- bulkDelete.add(new DeleteRequest("test").id(yellowDocId))
- .add(new DeleteRequest("test").id(yellowNullCountDocId))
- .add(new DeleteRequest("test").id(yellowNullDataDocId));
}
client().admin().indices().prepareRefresh("test").get();
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java
index fa5a1617e9d61..f9d97cbd910e0 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/lookup/EnrichLookupIT.java
@@ -232,7 +232,7 @@ public void testMultipleMatches() {
static DriverContext driverContext() {
return new DriverContext(
new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(),
- BlockFactory.getGlobalInstance()
+ BlockFactory.getNonBreakingInstance()
);
}
}
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
index 8f07a8a5dcdea..044e920744375 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4
@@ -76,8 +76,12 @@ operatorExpression
primaryExpression
: constant #constantDefault
| qualifiedName #dereference
+ | functionExpression #function
| LP booleanExpression RP #parenthesizedExpression
- | identifier LP (booleanExpression (COMMA booleanExpression)*)? RP #functionExpression
+ ;
+
+functionExpression
+ : identifier LP (ASTERISK | (booleanExpression (COMMA booleanExpression)*))? RP
;
rowCommand
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
index d488e51761e5e..e41d0f316e5f9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java
@@ -192,6 +192,10 @@ private FunctionDefinition[][] functions() {
@Override
protected String normalize(String name) {
+ return normalizeName(name);
+ }
+
+ public static String normalizeName(String name) {
return name.toLowerCase(Locale.ROOT);
}
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
index 0f4d194d8016c..eb4b11f5e2e34 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java
@@ -7,15 +7,19 @@
package org.elasticsearch.xpack.esql.optimizer;
+import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals;
import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals;
+import org.elasticsearch.xpack.esql.expression.function.aggregate.Count;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In;
import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules.OptimizerRule;
import org.elasticsearch.xpack.esql.plan.physical.AggregateExec;
import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec;
+import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec;
+import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat;
import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec;
import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec;
import org.elasticsearch.xpack.esql.plan.physical.FilterExec;
@@ -23,15 +27,19 @@
import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
import org.elasticsearch.xpack.esql.plan.physical.UnaryExec;
+import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders;
import org.elasticsearch.xpack.esql.planner.PhysicalVerificationException;
import org.elasticsearch.xpack.esql.planner.PhysicalVerifier;
import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery;
import org.elasticsearch.xpack.ql.common.Failure;
+import org.elasticsearch.xpack.ql.expression.Alias;
import org.elasticsearch.xpack.ql.expression.Attribute;
+import org.elasticsearch.xpack.ql.expression.AttributeMap;
import org.elasticsearch.xpack.ql.expression.Expression;
import org.elasticsearch.xpack.ql.expression.Expressions;
import org.elasticsearch.xpack.ql.expression.FieldAttribute;
import org.elasticsearch.xpack.ql.expression.MetadataAttribute;
+import org.elasticsearch.xpack.ql.expression.NamedExpression;
import org.elasticsearch.xpack.ql.expression.Order;
import org.elasticsearch.xpack.ql.expression.TypedAttribute;
import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction;
@@ -48,6 +56,7 @@
import org.elasticsearch.xpack.ql.rule.Rule;
import org.elasticsearch.xpack.ql.util.Queries;
import org.elasticsearch.xpack.ql.util.Queries.Clause;
+import org.elasticsearch.xpack.ql.util.StringUtils;
import java.util.ArrayList;
import java.util.Collection;
@@ -58,6 +67,9 @@
import java.util.function.Supplier;
import static java.util.Arrays.asList;
+import static java.util.Collections.emptyList;
+import static java.util.Collections.singletonList;
+import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType.COUNT;
import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.splitAnd;
import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP;
@@ -90,6 +102,7 @@ protected List> rules(boolean optimizeForEsSource) {
esSourceRules.add(new PushTopNToSource());
esSourceRules.add(new PushLimitToSource());
esSourceRules.add(new PushFiltersToSource());
+ esSourceRules.add(new PushStatsToSource());
}
// execute the rules multiple times to improve the chances of things being pushed down
@@ -304,6 +317,68 @@ private List buildFieldSorts(List orders) {
}
}
+ /**
+ * Looks for the case where certain stats exist right before the query and thus can be pushed down.
+ */
+ private static class PushStatsToSource extends OptimizerRule {
+
+ @Override
+ protected PhysicalPlan rule(AggregateExec aggregateExec) {
+ PhysicalPlan plan = aggregateExec;
+ if (aggregateExec.child() instanceof EsQueryExec queryExec) {
+ var tuple = pushableStats(aggregateExec);
+
+ // TODO: handle case where some aggs cannot be pushed down by breaking the aggs into two sources (regular + stats) + union
+ // use the stats since the attributes are larger in size (due to seen)
+ if (tuple.v2().size() == aggregateExec.aggregates().size()) {
+ plan = new EsStatsQueryExec(
+ aggregateExec.source(),
+ queryExec.index(),
+ queryExec.query(),
+ queryExec.limit(),
+ tuple.v1(),
+ tuple.v2()
+ );
+ }
+ }
+ return plan;
+ }
+
+ private Tuple, List> pushableStats(AggregateExec aggregate) {
+ AttributeMap stats = new AttributeMap<>();
+ Tuple, List> tuple = new Tuple<>(new ArrayList(), new ArrayList());
+
+ if (aggregate.groupings().isEmpty()) {
+ for (NamedExpression agg : aggregate.aggregates()) {
+ var attribute = agg.toAttribute();
+ Stat stat = stats.computeIfAbsent(attribute, a -> {
+ if (agg instanceof Alias as) {
+ Expression child = as.child();
+ if (child instanceof Count count) {
+ var target = count.field();
+ // TODO: add count over field (has to be field attribute)
+ if (target.foldable()) {
+ return new Stat(StringUtils.WILDCARD, COUNT);
+ }
+ }
+ }
+ return null;
+ });
+ if (stat != null) {
+ List intermediateAttributes = AbstractPhysicalOperationProviders.intermediateAttributes(
+ singletonList(agg),
+ emptyList()
+ );
+ tuple.v1().addAll(intermediateAttributes);
+ tuple.v2().add(stat);
+ }
+ }
+ }
+
+ return tuple;
+ }
+ }
+
private static final class EsqlTranslatorHandler extends QlTranslatorHandler {
@Override
public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
index d7b31cd220760..be46b6c6e1797 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
@@ -127,7 +127,7 @@ public Vocabulary getVocabulary() {
}
- @SuppressWarnings("this-escape") public EsqlBaseLexer(CharStream input) {
+ public EsqlBaseLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp
index 349f31f7c476d..658e09ca4b190 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp
@@ -177,6 +177,7 @@ regexBooleanExpression
valueExpression
operatorExpression
primaryExpression
+functionExpression
rowCommand
fields
field
@@ -216,4 +217,4 @@ enrichWithClause
atn:
-[4, 1, 81, 501, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 102, 8, 1, 10, 1, 12, 1, 105, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 111, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 126, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 138, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 145, 8, 5, 10, 5, 12, 5, 148, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 3, 5, 159, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 167, 8, 5, 10, 5, 12, 5, 170, 9, 5, 1, 6, 1, 6, 3, 6, 174, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 181, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 186, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 193, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 199, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 207, 8, 8, 10, 8, 12, 8, 210, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 223, 8, 9, 10, 9, 12, 9, 226, 9, 9, 3, 9, 228, 8, 9, 1, 9, 1, 9, 3, 9, 232, 8, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 5, 11, 240, 8, 11, 10, 11, 12, 11, 243, 9, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 3, 12, 250, 8, 12, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 256, 8, 13, 10, 13, 12, 13, 259, 9, 13, 1, 13, 3, 13, 262, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 269, 8, 14, 10, 14, 12, 14, 272, 9, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 3, 16, 281, 8, 
16, 1, 16, 1, 16, 3, 16, 285, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 291, 8, 17, 1, 18, 1, 18, 1, 18, 5, 18, 296, 8, 18, 10, 18, 12, 18, 299, 9, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 306, 8, 20, 10, 20, 12, 20, 309, 9, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 326, 8, 22, 10, 22, 12, 22, 329, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 337, 8, 22, 10, 22, 12, 22, 340, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 348, 8, 22, 10, 22, 12, 22, 351, 9, 22, 1, 22, 1, 22, 3, 22, 355, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 364, 8, 24, 10, 24, 12, 24, 367, 9, 24, 1, 25, 1, 25, 3, 25, 371, 8, 25, 1, 25, 1, 25, 3, 25, 375, 8, 25, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 381, 8, 26, 10, 26, 12, 26, 384, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 390, 8, 26, 10, 26, 12, 26, 393, 9, 26, 3, 26, 395, 8, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 401, 8, 27, 10, 27, 12, 27, 404, 9, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 410, 8, 28, 10, 28, 12, 28, 413, 9, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 3, 30, 423, 8, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 5, 33, 435, 8, 33, 10, 33, 12, 33, 438, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 3, 36, 448, 8, 36, 1, 37, 3, 37, 451, 8, 37, 1, 37, 1, 37, 1, 38, 3, 38, 456, 8, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 3, 43, 475, 8, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 481, 8, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 487, 8, 44, 10, 44, 12, 44, 490, 9, 44, 3, 44, 492, 8, 44, 1, 45, 1, 45, 1, 45, 3, 45, 497, 8, 45, 1, 45, 1, 45, 1, 45, 0, 3, 2, 10, 16, 46, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 76, 77, 
1, 0, 67, 68, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 54, 59, 531, 0, 92, 1, 0, 0, 0, 2, 95, 1, 0, 0, 0, 4, 110, 1, 0, 0, 0, 6, 125, 1, 0, 0, 0, 8, 127, 1, 0, 0, 0, 10, 158, 1, 0, 0, 0, 12, 185, 1, 0, 0, 0, 14, 192, 1, 0, 0, 0, 16, 198, 1, 0, 0, 0, 18, 231, 1, 0, 0, 0, 20, 233, 1, 0, 0, 0, 22, 236, 1, 0, 0, 0, 24, 249, 1, 0, 0, 0, 26, 251, 1, 0, 0, 0, 28, 263, 1, 0, 0, 0, 30, 275, 1, 0, 0, 0, 32, 278, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 292, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 302, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 354, 1, 0, 0, 0, 46, 356, 1, 0, 0, 0, 48, 359, 1, 0, 0, 0, 50, 368, 1, 0, 0, 0, 52, 394, 1, 0, 0, 0, 54, 396, 1, 0, 0, 0, 56, 405, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 418, 1, 0, 0, 0, 62, 424, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 431, 1, 0, 0, 0, 68, 439, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 450, 1, 0, 0, 0, 76, 455, 1, 0, 0, 0, 78, 459, 1, 0, 0, 0, 80, 461, 1, 0, 0, 0, 82, 463, 1, 0, 0, 0, 84, 466, 1, 0, 0, 0, 86, 474, 1, 0, 0, 0, 88, 476, 1, 0, 0, 0, 90, 496, 1, 0, 0, 0, 92, 93, 3, 2, 1, 0, 93, 94, 5, 0, 0, 1, 94, 1, 1, 0, 0, 0, 95, 96, 6, 1, -1, 0, 96, 97, 3, 4, 2, 0, 97, 103, 1, 0, 0, 0, 98, 99, 10, 1, 0, 0, 99, 100, 5, 26, 0, 0, 100, 102, 3, 6, 3, 0, 101, 98, 1, 0, 0, 0, 102, 105, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 103, 104, 1, 0, 0, 0, 104, 3, 1, 0, 0, 0, 105, 103, 1, 0, 0, 0, 106, 111, 3, 82, 41, 0, 107, 111, 3, 26, 13, 0, 108, 111, 3, 20, 10, 0, 109, 111, 3, 86, 43, 0, 110, 106, 1, 0, 0, 0, 110, 107, 1, 0, 0, 0, 110, 108, 1, 0, 0, 0, 110, 109, 1, 0, 0, 0, 111, 5, 1, 0, 0, 0, 112, 126, 3, 30, 15, 0, 113, 126, 3, 34, 17, 0, 114, 126, 3, 46, 23, 0, 115, 126, 3, 52, 26, 0, 116, 126, 3, 48, 24, 0, 117, 126, 3, 32, 16, 0, 118, 126, 3, 8, 4, 0, 119, 126, 3, 54, 27, 0, 120, 126, 3, 56, 28, 0, 121, 126, 3, 60, 30, 0, 122, 126, 3, 62, 31, 0, 123, 126, 3, 88, 44, 0, 124, 126, 3, 64, 32, 0, 125, 112, 1, 0, 0, 0, 125, 113, 1, 0, 0, 0, 125, 114, 1, 0, 0, 0, 125, 115, 1, 0, 0, 0, 125, 116, 1, 0, 0, 0, 125, 117, 
1, 0, 0, 0, 125, 118, 1, 0, 0, 0, 125, 119, 1, 0, 0, 0, 125, 120, 1, 0, 0, 0, 125, 121, 1, 0, 0, 0, 125, 122, 1, 0, 0, 0, 125, 123, 1, 0, 0, 0, 125, 124, 1, 0, 0, 0, 126, 7, 1, 0, 0, 0, 127, 128, 5, 18, 0, 0, 128, 129, 3, 10, 5, 0, 129, 9, 1, 0, 0, 0, 130, 131, 6, 5, -1, 0, 131, 132, 5, 44, 0, 0, 132, 159, 3, 10, 5, 7, 133, 159, 3, 14, 7, 0, 134, 159, 3, 12, 6, 0, 135, 137, 3, 14, 7, 0, 136, 138, 5, 44, 0, 0, 137, 136, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 5, 41, 0, 0, 140, 141, 5, 40, 0, 0, 141, 146, 3, 14, 7, 0, 142, 143, 5, 34, 0, 0, 143, 145, 3, 14, 7, 0, 144, 142, 1, 0, 0, 0, 145, 148, 1, 0, 0, 0, 146, 144, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 149, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 149, 150, 5, 50, 0, 0, 150, 159, 1, 0, 0, 0, 151, 152, 3, 14, 7, 0, 152, 154, 5, 42, 0, 0, 153, 155, 5, 44, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 45, 0, 0, 157, 159, 1, 0, 0, 0, 158, 130, 1, 0, 0, 0, 158, 133, 1, 0, 0, 0, 158, 134, 1, 0, 0, 0, 158, 135, 1, 0, 0, 0, 158, 151, 1, 0, 0, 0, 159, 168, 1, 0, 0, 0, 160, 161, 10, 4, 0, 0, 161, 162, 5, 31, 0, 0, 162, 167, 3, 10, 5, 5, 163, 164, 10, 3, 0, 0, 164, 165, 5, 47, 0, 0, 165, 167, 3, 10, 5, 4, 166, 160, 1, 0, 0, 0, 166, 163, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 168, 169, 1, 0, 0, 0, 169, 11, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 171, 173, 3, 14, 7, 0, 172, 174, 5, 44, 0, 0, 173, 172, 1, 0, 0, 0, 173, 174, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 5, 43, 0, 0, 176, 177, 3, 78, 39, 0, 177, 186, 1, 0, 0, 0, 178, 180, 3, 14, 7, 0, 179, 181, 5, 44, 0, 0, 180, 179, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 5, 49, 0, 0, 183, 184, 3, 78, 39, 0, 184, 186, 1, 0, 0, 0, 185, 171, 1, 0, 0, 0, 185, 178, 1, 0, 0, 0, 186, 13, 1, 0, 0, 0, 187, 193, 3, 16, 8, 0, 188, 189, 3, 16, 8, 0, 189, 190, 3, 80, 40, 0, 190, 191, 3, 16, 8, 0, 191, 193, 1, 0, 0, 0, 192, 187, 1, 0, 0, 0, 192, 188, 1, 0, 0, 0, 193, 15, 1, 0, 0, 0, 194, 195, 6, 8, 
-1, 0, 195, 199, 3, 18, 9, 0, 196, 197, 7, 0, 0, 0, 197, 199, 3, 16, 8, 3, 198, 194, 1, 0, 0, 0, 198, 196, 1, 0, 0, 0, 199, 208, 1, 0, 0, 0, 200, 201, 10, 2, 0, 0, 201, 202, 7, 1, 0, 0, 202, 207, 3, 16, 8, 3, 203, 204, 10, 1, 0, 0, 204, 205, 7, 0, 0, 0, 205, 207, 3, 16, 8, 2, 206, 200, 1, 0, 0, 0, 206, 203, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 17, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 211, 232, 3, 44, 22, 0, 212, 232, 3, 40, 20, 0, 213, 214, 5, 40, 0, 0, 214, 215, 3, 10, 5, 0, 215, 216, 5, 50, 0, 0, 216, 232, 1, 0, 0, 0, 217, 218, 3, 42, 21, 0, 218, 227, 5, 40, 0, 0, 219, 224, 3, 10, 5, 0, 220, 221, 5, 34, 0, 0, 221, 223, 3, 10, 5, 0, 222, 220, 1, 0, 0, 0, 223, 226, 1, 0, 0, 0, 224, 222, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 228, 1, 0, 0, 0, 226, 224, 1, 0, 0, 0, 227, 219, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 5, 50, 0, 0, 230, 232, 1, 0, 0, 0, 231, 211, 1, 0, 0, 0, 231, 212, 1, 0, 0, 0, 231, 213, 1, 0, 0, 0, 231, 217, 1, 0, 0, 0, 232, 19, 1, 0, 0, 0, 233, 234, 5, 14, 0, 0, 234, 235, 3, 22, 11, 0, 235, 21, 1, 0, 0, 0, 236, 241, 3, 24, 12, 0, 237, 238, 5, 34, 0, 0, 238, 240, 3, 24, 12, 0, 239, 237, 1, 0, 0, 0, 240, 243, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 23, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 244, 250, 3, 10, 5, 0, 245, 246, 3, 40, 20, 0, 246, 247, 5, 33, 0, 0, 247, 248, 3, 10, 5, 0, 248, 250, 1, 0, 0, 0, 249, 244, 1, 0, 0, 0, 249, 245, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 252, 5, 6, 0, 0, 252, 257, 3, 38, 19, 0, 253, 254, 5, 34, 0, 0, 254, 256, 3, 38, 19, 0, 255, 253, 1, 0, 0, 0, 256, 259, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 261, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 260, 262, 3, 28, 14, 0, 261, 260, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 27, 1, 0, 0, 0, 263, 264, 5, 65, 0, 0, 264, 265, 5, 73, 0, 0, 265, 270, 3, 38, 19, 0, 266, 267, 5, 34, 0, 0, 267, 269, 3, 38, 19, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 
0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 274, 5, 66, 0, 0, 274, 29, 1, 0, 0, 0, 275, 276, 5, 4, 0, 0, 276, 277, 3, 22, 11, 0, 277, 31, 1, 0, 0, 0, 278, 280, 5, 17, 0, 0, 279, 281, 3, 22, 11, 0, 280, 279, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 283, 5, 30, 0, 0, 283, 285, 3, 36, 18, 0, 284, 282, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 33, 1, 0, 0, 0, 286, 287, 5, 8, 0, 0, 287, 290, 3, 22, 11, 0, 288, 289, 5, 30, 0, 0, 289, 291, 3, 36, 18, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 35, 1, 0, 0, 0, 292, 297, 3, 40, 20, 0, 293, 294, 5, 34, 0, 0, 294, 296, 3, 40, 20, 0, 295, 293, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 37, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 301, 7, 2, 0, 0, 301, 39, 1, 0, 0, 0, 302, 307, 3, 42, 21, 0, 303, 304, 5, 36, 0, 0, 304, 306, 3, 42, 21, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 41, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 311, 7, 3, 0, 0, 311, 43, 1, 0, 0, 0, 312, 355, 5, 45, 0, 0, 313, 314, 3, 76, 38, 0, 314, 315, 5, 67, 0, 0, 315, 355, 1, 0, 0, 0, 316, 355, 3, 74, 37, 0, 317, 355, 3, 76, 38, 0, 318, 355, 3, 70, 35, 0, 319, 355, 5, 48, 0, 0, 320, 355, 3, 78, 39, 0, 321, 322, 5, 65, 0, 0, 322, 327, 3, 72, 36, 0, 323, 324, 5, 34, 0, 0, 324, 326, 3, 72, 36, 0, 325, 323, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 327, 328, 1, 0, 0, 0, 328, 330, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 330, 331, 5, 66, 0, 0, 331, 355, 1, 0, 0, 0, 332, 333, 5, 65, 0, 0, 333, 338, 3, 70, 35, 0, 334, 335, 5, 34, 0, 0, 335, 337, 3, 70, 35, 0, 336, 334, 1, 0, 0, 0, 337, 340, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 341, 1, 0, 0, 0, 340, 338, 1, 0, 0, 0, 341, 342, 5, 66, 0, 0, 342, 355, 1, 0, 0, 0, 343, 344, 5, 65, 0, 0, 344, 349, 3, 78, 39, 0, 345, 346, 5, 34, 0, 0, 346, 348, 3, 78, 39, 0, 347, 345, 1, 0, 0, 0, 348, 351, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 352, 1, 0, 0, 0, 351, 
349, 1, 0, 0, 0, 352, 353, 5, 66, 0, 0, 353, 355, 1, 0, 0, 0, 354, 312, 1, 0, 0, 0, 354, 313, 1, 0, 0, 0, 354, 316, 1, 0, 0, 0, 354, 317, 1, 0, 0, 0, 354, 318, 1, 0, 0, 0, 354, 319, 1, 0, 0, 0, 354, 320, 1, 0, 0, 0, 354, 321, 1, 0, 0, 0, 354, 332, 1, 0, 0, 0, 354, 343, 1, 0, 0, 0, 355, 45, 1, 0, 0, 0, 356, 357, 5, 10, 0, 0, 357, 358, 5, 28, 0, 0, 358, 47, 1, 0, 0, 0, 359, 360, 5, 16, 0, 0, 360, 365, 3, 50, 25, 0, 361, 362, 5, 34, 0, 0, 362, 364, 3, 50, 25, 0, 363, 361, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 49, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 368, 370, 3, 10, 5, 0, 369, 371, 7, 4, 0, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 373, 5, 46, 0, 0, 373, 375, 7, 5, 0, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 51, 1, 0, 0, 0, 376, 377, 5, 9, 0, 0, 377, 382, 3, 38, 19, 0, 378, 379, 5, 34, 0, 0, 379, 381, 3, 38, 19, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 395, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 386, 5, 12, 0, 0, 386, 391, 3, 38, 19, 0, 387, 388, 5, 34, 0, 0, 388, 390, 3, 38, 19, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 395, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 376, 1, 0, 0, 0, 394, 385, 1, 0, 0, 0, 395, 53, 1, 0, 0, 0, 396, 397, 5, 2, 0, 0, 397, 402, 3, 38, 19, 0, 398, 399, 5, 34, 0, 0, 399, 401, 3, 38, 19, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 55, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 13, 0, 0, 406, 411, 3, 58, 29, 0, 407, 408, 5, 34, 0, 0, 408, 410, 3, 58, 29, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 57, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 3, 38, 19, 0, 415, 416, 5, 72, 0, 0, 416, 417, 3, 38, 19, 0, 417, 59, 1, 0, 0, 0, 418, 419, 5, 1, 0, 0, 419, 420, 3, 18, 9, 0, 420, 422, 3, 78, 39, 0, 421, 423, 3, 66, 33, 0, 422, 421, 1, 0, 0, 0, 422, 423, 
1, 0, 0, 0, 423, 61, 1, 0, 0, 0, 424, 425, 5, 7, 0, 0, 425, 426, 3, 18, 9, 0, 426, 427, 3, 78, 39, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 11, 0, 0, 429, 430, 3, 38, 19, 0, 430, 65, 1, 0, 0, 0, 431, 436, 3, 68, 34, 0, 432, 433, 5, 34, 0, 0, 433, 435, 3, 68, 34, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 67, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 3, 42, 21, 0, 440, 441, 5, 33, 0, 0, 441, 442, 3, 44, 22, 0, 442, 69, 1, 0, 0, 0, 443, 444, 7, 6, 0, 0, 444, 71, 1, 0, 0, 0, 445, 448, 3, 74, 37, 0, 446, 448, 3, 76, 38, 0, 447, 445, 1, 0, 0, 0, 447, 446, 1, 0, 0, 0, 448, 73, 1, 0, 0, 0, 449, 451, 7, 0, 0, 0, 450, 449, 1, 0, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 5, 29, 0, 0, 453, 75, 1, 0, 0, 0, 454, 456, 7, 0, 0, 0, 455, 454, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 5, 28, 0, 0, 458, 77, 1, 0, 0, 0, 459, 460, 5, 27, 0, 0, 460, 79, 1, 0, 0, 0, 461, 462, 7, 7, 0, 0, 462, 81, 1, 0, 0, 0, 463, 464, 5, 5, 0, 0, 464, 465, 3, 84, 42, 0, 465, 83, 1, 0, 0, 0, 466, 467, 5, 65, 0, 0, 467, 468, 3, 2, 1, 0, 468, 469, 5, 66, 0, 0, 469, 85, 1, 0, 0, 0, 470, 471, 5, 15, 0, 0, 471, 475, 5, 52, 0, 0, 472, 473, 5, 15, 0, 0, 473, 475, 5, 53, 0, 0, 474, 470, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 87, 1, 0, 0, 0, 476, 477, 5, 3, 0, 0, 477, 480, 3, 38, 19, 0, 478, 479, 5, 74, 0, 0, 479, 481, 3, 38, 19, 0, 480, 478, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 491, 1, 0, 0, 0, 482, 483, 5, 75, 0, 0, 483, 488, 3, 90, 45, 0, 484, 485, 5, 34, 0, 0, 485, 487, 3, 90, 45, 0, 486, 484, 1, 0, 0, 0, 487, 490, 1, 0, 0, 0, 488, 486, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 491, 482, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 89, 1, 0, 0, 0, 493, 494, 3, 38, 19, 0, 494, 495, 5, 33, 0, 0, 495, 497, 1, 0, 0, 0, 496, 493, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 3, 38, 19, 0, 499, 91, 1, 0, 0, 0, 51, 103, 110, 125, 137, 146, 154, 158, 166, 168, 173, 180, 185, 
192, 198, 206, 208, 224, 227, 231, 241, 249, 257, 261, 270, 280, 284, 290, 297, 307, 327, 338, 349, 354, 365, 370, 374, 382, 391, 394, 402, 411, 422, 436, 447, 450, 455, 474, 480, 488, 491, 496]
\ No newline at end of file
+[4, 1, 81, 505, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 104, 8, 1, 10, 1, 12, 1, 107, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 113, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 128, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 140, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 147, 8, 5, 10, 5, 12, 5, 150, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 157, 8, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 169, 8, 5, 10, 5, 12, 5, 172, 9, 5, 1, 6, 1, 6, 3, 6, 176, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 183, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 188, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 195, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 201, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 209, 8, 8, 10, 8, 12, 8, 212, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 221, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 229, 8, 10, 10, 10, 12, 10, 232, 9, 10, 3, 10, 234, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 244, 8, 12, 10, 12, 12, 12, 247, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 254, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 260, 8, 14, 10, 14, 12, 14, 263, 9, 14, 1, 14, 3, 14, 266, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 273, 8, 15, 10, 15, 12, 15, 276, 9, 15, 1, 15, 1, 15, 1, 16, 1, 
16, 1, 16, 1, 17, 1, 17, 3, 17, 285, 8, 17, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 295, 8, 18, 1, 19, 1, 19, 1, 19, 5, 19, 300, 8, 19, 10, 19, 12, 19, 303, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 310, 8, 21, 10, 21, 12, 21, 313, 9, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 330, 8, 23, 10, 23, 12, 23, 333, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 341, 8, 23, 10, 23, 12, 23, 344, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 352, 8, 23, 10, 23, 12, 23, 355, 9, 23, 1, 23, 1, 23, 3, 23, 359, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 368, 8, 25, 10, 25, 12, 25, 371, 9, 25, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 26, 1, 26, 3, 26, 379, 8, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 385, 8, 27, 10, 27, 12, 27, 388, 9, 27, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 394, 8, 27, 10, 27, 12, 27, 397, 9, 27, 3, 27, 399, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 405, 8, 28, 10, 28, 12, 28, 408, 9, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 414, 8, 29, 10, 29, 12, 29, 417, 9, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 3, 31, 427, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 1, 37, 3, 37, 452, 8, 37, 1, 38, 3, 38, 455, 8, 38, 1, 38, 1, 38, 1, 39, 3, 39, 460, 8, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 479, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 485, 8, 45, 1, 45, 1, 45, 1, 45, 1, 45, 5, 45, 491, 8, 45, 10, 45, 12, 45, 494, 9, 45, 3, 45, 496, 8, 45, 1, 46, 1, 46, 1, 46, 3, 46, 501, 8, 46, 1, 46, 1, 46, 1, 46, 0, 3, 2, 10, 16, 47, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 0, 
8, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 76, 77, 1, 0, 67, 68, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 54, 59, 535, 0, 94, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 4, 112, 1, 0, 0, 0, 6, 127, 1, 0, 0, 0, 8, 129, 1, 0, 0, 0, 10, 160, 1, 0, 0, 0, 12, 187, 1, 0, 0, 0, 14, 194, 1, 0, 0, 0, 16, 200, 1, 0, 0, 0, 18, 220, 1, 0, 0, 0, 20, 222, 1, 0, 0, 0, 22, 237, 1, 0, 0, 0, 24, 240, 1, 0, 0, 0, 26, 253, 1, 0, 0, 0, 28, 255, 1, 0, 0, 0, 30, 267, 1, 0, 0, 0, 32, 279, 1, 0, 0, 0, 34, 282, 1, 0, 0, 0, 36, 290, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 304, 1, 0, 0, 0, 42, 306, 1, 0, 0, 0, 44, 314, 1, 0, 0, 0, 46, 358, 1, 0, 0, 0, 48, 360, 1, 0, 0, 0, 50, 363, 1, 0, 0, 0, 52, 372, 1, 0, 0, 0, 54, 398, 1, 0, 0, 0, 56, 400, 1, 0, 0, 0, 58, 409, 1, 0, 0, 0, 60, 418, 1, 0, 0, 0, 62, 422, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 432, 1, 0, 0, 0, 68, 435, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 451, 1, 0, 0, 0, 76, 454, 1, 0, 0, 0, 78, 459, 1, 0, 0, 0, 80, 463, 1, 0, 0, 0, 82, 465, 1, 0, 0, 0, 84, 467, 1, 0, 0, 0, 86, 470, 1, 0, 0, 0, 88, 478, 1, 0, 0, 0, 90, 480, 1, 0, 0, 0, 92, 500, 1, 0, 0, 0, 94, 95, 3, 2, 1, 0, 95, 96, 5, 0, 0, 1, 96, 1, 1, 0, 0, 0, 97, 98, 6, 1, -1, 0, 98, 99, 3, 4, 2, 0, 99, 105, 1, 0, 0, 0, 100, 101, 10, 1, 0, 0, 101, 102, 5, 26, 0, 0, 102, 104, 3, 6, 3, 0, 103, 100, 1, 0, 0, 0, 104, 107, 1, 0, 0, 0, 105, 103, 1, 0, 0, 0, 105, 106, 1, 0, 0, 0, 106, 3, 1, 0, 0, 0, 107, 105, 1, 0, 0, 0, 108, 113, 3, 84, 42, 0, 109, 113, 3, 28, 14, 0, 110, 113, 3, 22, 11, 0, 111, 113, 3, 88, 44, 0, 112, 108, 1, 0, 0, 0, 112, 109, 1, 0, 0, 0, 112, 110, 1, 0, 0, 0, 112, 111, 1, 0, 0, 0, 113, 5, 1, 0, 0, 0, 114, 128, 3, 32, 16, 0, 115, 128, 3, 36, 18, 0, 116, 128, 3, 48, 24, 0, 117, 128, 3, 54, 27, 0, 118, 128, 3, 50, 25, 0, 119, 128, 3, 34, 17, 0, 120, 128, 3, 8, 4, 0, 121, 128, 3, 56, 28, 0, 122, 128, 3, 58, 29, 0, 123, 128, 3, 62, 31, 0, 124, 128, 3, 64, 32, 0, 125, 128, 3, 90, 45, 0, 126, 128, 3, 66, 33, 0, 127, 114, 1, 0, 0, 0, 127, 115, 1, 0, 0, 0, 127, 
116, 1, 0, 0, 0, 127, 117, 1, 0, 0, 0, 127, 118, 1, 0, 0, 0, 127, 119, 1, 0, 0, 0, 127, 120, 1, 0, 0, 0, 127, 121, 1, 0, 0, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 7, 1, 0, 0, 0, 129, 130, 5, 18, 0, 0, 130, 131, 3, 10, 5, 0, 131, 9, 1, 0, 0, 0, 132, 133, 6, 5, -1, 0, 133, 134, 5, 44, 0, 0, 134, 161, 3, 10, 5, 7, 135, 161, 3, 14, 7, 0, 136, 161, 3, 12, 6, 0, 137, 139, 3, 14, 7, 0, 138, 140, 5, 44, 0, 0, 139, 138, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 1, 0, 0, 0, 141, 142, 5, 41, 0, 0, 142, 143, 5, 40, 0, 0, 143, 148, 3, 14, 7, 0, 144, 145, 5, 34, 0, 0, 145, 147, 3, 14, 7, 0, 146, 144, 1, 0, 0, 0, 147, 150, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 151, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 151, 152, 5, 50, 0, 0, 152, 161, 1, 0, 0, 0, 153, 154, 3, 14, 7, 0, 154, 156, 5, 42, 0, 0, 155, 157, 5, 44, 0, 0, 156, 155, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 5, 45, 0, 0, 159, 161, 1, 0, 0, 0, 160, 132, 1, 0, 0, 0, 160, 135, 1, 0, 0, 0, 160, 136, 1, 0, 0, 0, 160, 137, 1, 0, 0, 0, 160, 153, 1, 0, 0, 0, 161, 170, 1, 0, 0, 0, 162, 163, 10, 4, 0, 0, 163, 164, 5, 31, 0, 0, 164, 169, 3, 10, 5, 5, 165, 166, 10, 3, 0, 0, 166, 167, 5, 47, 0, 0, 167, 169, 3, 10, 5, 4, 168, 162, 1, 0, 0, 0, 168, 165, 1, 0, 0, 0, 169, 172, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 11, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 175, 3, 14, 7, 0, 174, 176, 5, 44, 0, 0, 175, 174, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 178, 5, 43, 0, 0, 178, 179, 3, 80, 40, 0, 179, 188, 1, 0, 0, 0, 180, 182, 3, 14, 7, 0, 181, 183, 5, 44, 0, 0, 182, 181, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 5, 49, 0, 0, 185, 186, 3, 80, 40, 0, 186, 188, 1, 0, 0, 0, 187, 173, 1, 0, 0, 0, 187, 180, 1, 0, 0, 0, 188, 13, 1, 0, 0, 0, 189, 195, 3, 16, 8, 0, 190, 191, 3, 16, 8, 0, 191, 192, 3, 82, 41, 0, 192, 193, 3, 16, 8, 0, 193, 195, 1, 0, 0, 0, 194, 189, 1, 
0, 0, 0, 194, 190, 1, 0, 0, 0, 195, 15, 1, 0, 0, 0, 196, 197, 6, 8, -1, 0, 197, 201, 3, 18, 9, 0, 198, 199, 7, 0, 0, 0, 199, 201, 3, 16, 8, 3, 200, 196, 1, 0, 0, 0, 200, 198, 1, 0, 0, 0, 201, 210, 1, 0, 0, 0, 202, 203, 10, 2, 0, 0, 203, 204, 7, 1, 0, 0, 204, 209, 3, 16, 8, 3, 205, 206, 10, 1, 0, 0, 206, 207, 7, 0, 0, 0, 207, 209, 3, 16, 8, 2, 208, 202, 1, 0, 0, 0, 208, 205, 1, 0, 0, 0, 209, 212, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 17, 1, 0, 0, 0, 212, 210, 1, 0, 0, 0, 213, 221, 3, 46, 23, 0, 214, 221, 3, 42, 21, 0, 215, 221, 3, 20, 10, 0, 216, 217, 5, 40, 0, 0, 217, 218, 3, 10, 5, 0, 218, 219, 5, 50, 0, 0, 219, 221, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 220, 214, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 220, 216, 1, 0, 0, 0, 221, 19, 1, 0, 0, 0, 222, 223, 3, 44, 22, 0, 223, 233, 5, 40, 0, 0, 224, 234, 5, 62, 0, 0, 225, 230, 3, 10, 5, 0, 226, 227, 5, 34, 0, 0, 227, 229, 3, 10, 5, 0, 228, 226, 1, 0, 0, 0, 229, 232, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 233, 224, 1, 0, 0, 0, 233, 225, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 236, 5, 50, 0, 0, 236, 21, 1, 0, 0, 0, 237, 238, 5, 14, 0, 0, 238, 239, 3, 24, 12, 0, 239, 23, 1, 0, 0, 0, 240, 245, 3, 26, 13, 0, 241, 242, 5, 34, 0, 0, 242, 244, 3, 26, 13, 0, 243, 241, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 25, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 248, 254, 3, 10, 5, 0, 249, 250, 3, 42, 21, 0, 250, 251, 5, 33, 0, 0, 251, 252, 3, 10, 5, 0, 252, 254, 1, 0, 0, 0, 253, 248, 1, 0, 0, 0, 253, 249, 1, 0, 0, 0, 254, 27, 1, 0, 0, 0, 255, 256, 5, 6, 0, 0, 256, 261, 3, 40, 20, 0, 257, 258, 5, 34, 0, 0, 258, 260, 3, 40, 20, 0, 259, 257, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 266, 3, 30, 15, 0, 265, 264, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 29, 1, 0, 0, 0, 267, 268, 5, 65, 0, 0, 268, 269, 5, 73, 0, 0, 269, 274, 3, 40, 
20, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 40, 20, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 277, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 278, 5, 66, 0, 0, 278, 31, 1, 0, 0, 0, 279, 280, 5, 4, 0, 0, 280, 281, 3, 24, 12, 0, 281, 33, 1, 0, 0, 0, 282, 284, 5, 17, 0, 0, 283, 285, 3, 24, 12, 0, 284, 283, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 287, 5, 30, 0, 0, 287, 289, 3, 38, 19, 0, 288, 286, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 35, 1, 0, 0, 0, 290, 291, 5, 8, 0, 0, 291, 294, 3, 24, 12, 0, 292, 293, 5, 30, 0, 0, 293, 295, 3, 38, 19, 0, 294, 292, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 37, 1, 0, 0, 0, 296, 301, 3, 42, 21, 0, 297, 298, 5, 34, 0, 0, 298, 300, 3, 42, 21, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 39, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 7, 2, 0, 0, 305, 41, 1, 0, 0, 0, 306, 311, 3, 44, 22, 0, 307, 308, 5, 36, 0, 0, 308, 310, 3, 44, 22, 0, 309, 307, 1, 0, 0, 0, 310, 313, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 43, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 314, 315, 7, 3, 0, 0, 315, 45, 1, 0, 0, 0, 316, 359, 5, 45, 0, 0, 317, 318, 3, 78, 39, 0, 318, 319, 5, 67, 0, 0, 319, 359, 1, 0, 0, 0, 320, 359, 3, 76, 38, 0, 321, 359, 3, 78, 39, 0, 322, 359, 3, 72, 36, 0, 323, 359, 5, 48, 0, 0, 324, 359, 3, 80, 40, 0, 325, 326, 5, 65, 0, 0, 326, 331, 3, 74, 37, 0, 327, 328, 5, 34, 0, 0, 328, 330, 3, 74, 37, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 334, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 5, 66, 0, 0, 335, 359, 1, 0, 0, 0, 336, 337, 5, 65, 0, 0, 337, 342, 3, 72, 36, 0, 338, 339, 5, 34, 0, 0, 339, 341, 3, 72, 36, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 345, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 66, 0, 0, 346, 359, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 353, 3, 80, 40, 0, 349, 350, 5, 34, 0, 0, 350, 
352, 3, 80, 40, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 357, 5, 66, 0, 0, 357, 359, 1, 0, 0, 0, 358, 316, 1, 0, 0, 0, 358, 317, 1, 0, 0, 0, 358, 320, 1, 0, 0, 0, 358, 321, 1, 0, 0, 0, 358, 322, 1, 0, 0, 0, 358, 323, 1, 0, 0, 0, 358, 324, 1, 0, 0, 0, 358, 325, 1, 0, 0, 0, 358, 336, 1, 0, 0, 0, 358, 347, 1, 0, 0, 0, 359, 47, 1, 0, 0, 0, 360, 361, 5, 10, 0, 0, 361, 362, 5, 28, 0, 0, 362, 49, 1, 0, 0, 0, 363, 364, 5, 16, 0, 0, 364, 369, 3, 52, 26, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 52, 26, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 51, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 374, 3, 10, 5, 0, 373, 375, 7, 4, 0, 0, 374, 373, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 378, 1, 0, 0, 0, 376, 377, 5, 46, 0, 0, 377, 379, 7, 5, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 53, 1, 0, 0, 0, 380, 381, 5, 9, 0, 0, 381, 386, 3, 40, 20, 0, 382, 383, 5, 34, 0, 0, 383, 385, 3, 40, 20, 0, 384, 382, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 399, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 389, 390, 5, 12, 0, 0, 390, 395, 3, 40, 20, 0, 391, 392, 5, 34, 0, 0, 392, 394, 3, 40, 20, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 380, 1, 0, 0, 0, 398, 389, 1, 0, 0, 0, 399, 55, 1, 0, 0, 0, 400, 401, 5, 2, 0, 0, 401, 406, 3, 40, 20, 0, 402, 403, 5, 34, 0, 0, 403, 405, 3, 40, 20, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 57, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 410, 5, 13, 0, 0, 410, 415, 3, 60, 30, 0, 411, 412, 5, 34, 0, 0, 412, 414, 3, 60, 30, 0, 413, 411, 1, 0, 0, 0, 414, 417, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 59, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 418, 419, 3, 40, 20, 0, 419, 420, 5, 72, 0, 0, 420, 421, 3, 40, 20, 0, 421, 61, 1, 
0, 0, 0, 422, 423, 5, 1, 0, 0, 423, 424, 3, 18, 9, 0, 424, 426, 3, 80, 40, 0, 425, 427, 3, 68, 34, 0, 426, 425, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 7, 0, 0, 429, 430, 3, 18, 9, 0, 430, 431, 3, 80, 40, 0, 431, 65, 1, 0, 0, 0, 432, 433, 5, 11, 0, 0, 433, 434, 3, 40, 20, 0, 434, 67, 1, 0, 0, 0, 435, 440, 3, 70, 35, 0, 436, 437, 5, 34, 0, 0, 437, 439, 3, 70, 35, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 69, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 3, 44, 22, 0, 444, 445, 5, 33, 0, 0, 445, 446, 3, 46, 23, 0, 446, 71, 1, 0, 0, 0, 447, 448, 7, 6, 0, 0, 448, 73, 1, 0, 0, 0, 449, 452, 3, 76, 38, 0, 450, 452, 3, 78, 39, 0, 451, 449, 1, 0, 0, 0, 451, 450, 1, 0, 0, 0, 452, 75, 1, 0, 0, 0, 453, 455, 7, 0, 0, 0, 454, 453, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 5, 29, 0, 0, 457, 77, 1, 0, 0, 0, 458, 460, 7, 0, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 5, 28, 0, 0, 462, 79, 1, 0, 0, 0, 463, 464, 5, 27, 0, 0, 464, 81, 1, 0, 0, 0, 465, 466, 7, 7, 0, 0, 466, 83, 1, 0, 0, 0, 467, 468, 5, 5, 0, 0, 468, 469, 3, 86, 43, 0, 469, 85, 1, 0, 0, 0, 470, 471, 5, 65, 0, 0, 471, 472, 3, 2, 1, 0, 472, 473, 5, 66, 0, 0, 473, 87, 1, 0, 0, 0, 474, 475, 5, 15, 0, 0, 475, 479, 5, 52, 0, 0, 476, 477, 5, 15, 0, 0, 477, 479, 5, 53, 0, 0, 478, 474, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 479, 89, 1, 0, 0, 0, 480, 481, 5, 3, 0, 0, 481, 484, 3, 40, 20, 0, 482, 483, 5, 74, 0, 0, 483, 485, 3, 40, 20, 0, 484, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 495, 1, 0, 0, 0, 486, 487, 5, 75, 0, 0, 487, 492, 3, 92, 46, 0, 488, 489, 5, 34, 0, 0, 489, 491, 3, 92, 46, 0, 490, 488, 1, 0, 0, 0, 491, 494, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 496, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 486, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 91, 1, 0, 0, 0, 497, 498, 3, 40, 20, 0, 498, 499, 5, 33, 0, 0, 499, 501, 1, 0, 0, 0, 500, 497, 1, 0, 0, 0, 500, 501, 1, 0, 0, 
0, 501, 502, 1, 0, 0, 0, 502, 503, 3, 40, 20, 0, 503, 93, 1, 0, 0, 0, 51, 105, 112, 127, 139, 148, 156, 160, 168, 170, 175, 182, 187, 194, 200, 208, 210, 220, 230, 233, 245, 253, 261, 265, 274, 284, 288, 294, 301, 311, 331, 342, 353, 358, 369, 374, 378, 386, 395, 398, 406, 415, 426, 440, 451, 454, 459, 478, 484, 492, 495, 500]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
index 79cca599aabac..49d9abcc087c7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
@@ -34,28 +34,29 @@ public class EsqlBaseParser extends Parser {
RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3,
RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6,
RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9,
- RULE_rowCommand = 10, RULE_fields = 11, RULE_field = 12, RULE_fromCommand = 13,
- RULE_metadata = 14, RULE_evalCommand = 15, RULE_statsCommand = 16, RULE_inlinestatsCommand = 17,
- RULE_grouping = 18, RULE_sourceIdentifier = 19, RULE_qualifiedName = 20,
- RULE_identifier = 21, RULE_constant = 22, RULE_limitCommand = 23, RULE_sortCommand = 24,
- RULE_orderExpression = 25, RULE_keepCommand = 26, RULE_dropCommand = 27,
- RULE_renameCommand = 28, RULE_renameClause = 29, RULE_dissectCommand = 30,
- RULE_grokCommand = 31, RULE_mvExpandCommand = 32, RULE_commandOptions = 33,
- RULE_commandOption = 34, RULE_booleanValue = 35, RULE_numericValue = 36,
- RULE_decimalValue = 37, RULE_integerValue = 38, RULE_string = 39, RULE_comparisonOperator = 40,
- RULE_explainCommand = 41, RULE_subqueryExpression = 42, RULE_showCommand = 43,
- RULE_enrichCommand = 44, RULE_enrichWithClause = 45;
+ RULE_functionExpression = 10, RULE_rowCommand = 11, RULE_fields = 12,
+ RULE_field = 13, RULE_fromCommand = 14, RULE_metadata = 15, RULE_evalCommand = 16,
+ RULE_statsCommand = 17, RULE_inlinestatsCommand = 18, RULE_grouping = 19,
+ RULE_sourceIdentifier = 20, RULE_qualifiedName = 21, RULE_identifier = 22,
+ RULE_constant = 23, RULE_limitCommand = 24, RULE_sortCommand = 25, RULE_orderExpression = 26,
+ RULE_keepCommand = 27, RULE_dropCommand = 28, RULE_renameCommand = 29,
+ RULE_renameClause = 30, RULE_dissectCommand = 31, RULE_grokCommand = 32,
+ RULE_mvExpandCommand = 33, RULE_commandOptions = 34, RULE_commandOption = 35,
+ RULE_booleanValue = 36, RULE_numericValue = 37, RULE_decimalValue = 38,
+ RULE_integerValue = 39, RULE_string = 40, RULE_comparisonOperator = 41,
+ RULE_explainCommand = 42, RULE_subqueryExpression = 43, RULE_showCommand = 44,
+ RULE_enrichCommand = 45, RULE_enrichWithClause = 46;
private static String[] makeRuleNames() {
return new String[] {
"singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand",
"booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression",
- "primaryExpression", "rowCommand", "fields", "field", "fromCommand",
- "metadata", "evalCommand", "statsCommand", "inlinestatsCommand", "grouping",
- "sourceIdentifier", "qualifiedName", "identifier", "constant", "limitCommand",
- "sortCommand", "orderExpression", "keepCommand", "dropCommand", "renameCommand",
- "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions",
- "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue",
- "string", "comparisonOperator", "explainCommand", "subqueryExpression",
+ "primaryExpression", "functionExpression", "rowCommand", "fields", "field",
+ "fromCommand", "metadata", "evalCommand", "statsCommand", "inlinestatsCommand",
+ "grouping", "sourceIdentifier", "qualifiedName", "identifier", "constant",
+ "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand",
+ "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand",
+ "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue",
+ "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression",
"showCommand", "enrichCommand", "enrichWithClause"
};
}
@@ -138,7 +139,7 @@ public Vocabulary getVocabulary() {
@Override
public ATN getATN() { return _ATN; }
- @SuppressWarnings("this-escape") public EsqlBaseParser(TokenStream input) {
+ public EsqlBaseParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@@ -174,9 +175,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio
try {
enterOuterAlt(_localctx, 1);
{
- setState(92);
+ setState(94);
query(0);
- setState(93);
+ setState(95);
match(EOF);
}
}
@@ -212,7 +213,7 @@ public QueryContext query() {
public ProcessingCommandContext processingCommand() {
return getRuleContext(ProcessingCommandContext.class,0);
}
- @SuppressWarnings("this-escape") public CompositeQueryContext(QueryContext ctx) { copyFrom(ctx); }
+ public CompositeQueryContext(QueryContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterCompositeQuery(this);
@@ -232,7 +233,7 @@ public static class SingleCommandQueryContext extends QueryContext {
public SourceCommandContext sourceCommand() {
return getRuleContext(SourceCommandContext.class,0);
}
- @SuppressWarnings("this-escape") public SingleCommandQueryContext(QueryContext ctx) { copyFrom(ctx); }
+ public SingleCommandQueryContext(QueryContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterSingleCommandQuery(this);
@@ -268,11 +269,11 @@ private QueryContext query(int _p) throws RecognitionException {
_ctx = _localctx;
_prevctx = _localctx;
- setState(96);
+ setState(98);
sourceCommand();
}
_ctx.stop = _input.LT(-1);
- setState(103);
+ setState(105);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
@@ -283,16 +284,16 @@ private QueryContext query(int _p) throws RecognitionException {
{
_localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState));
pushNewRecursionContext(_localctx, _startState, RULE_query);
- setState(98);
+ setState(100);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(99);
+ setState(101);
match(PIPE);
- setState(100);
+ setState(102);
processingCommand();
}
}
}
- setState(105);
+ setState(107);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,0,_ctx);
}
@@ -346,34 +347,34 @@ public final SourceCommandContext sourceCommand() throws RecognitionException {
SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState());
enterRule(_localctx, 4, RULE_sourceCommand);
try {
- setState(110);
+ setState(112);
_errHandler.sync(this);
switch (_input.LA(1)) {
case EXPLAIN:
enterOuterAlt(_localctx, 1);
{
- setState(106);
+ setState(108);
explainCommand();
}
break;
case FROM:
enterOuterAlt(_localctx, 2);
{
- setState(107);
+ setState(109);
fromCommand();
}
break;
case ROW:
enterOuterAlt(_localctx, 3);
{
- setState(108);
+ setState(110);
rowCommand();
}
break;
case SHOW:
enterOuterAlt(_localctx, 4);
{
- setState(109);
+ setState(111);
showCommand();
}
break;
@@ -456,27 +457,27 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce
ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState());
enterRule(_localctx, 6, RULE_processingCommand);
try {
- setState(125);
+ setState(127);
_errHandler.sync(this);
switch (_input.LA(1)) {
case EVAL:
enterOuterAlt(_localctx, 1);
{
- setState(112);
+ setState(114);
evalCommand();
}
break;
case INLINESTATS:
enterOuterAlt(_localctx, 2);
{
- setState(113);
+ setState(115);
inlinestatsCommand();
}
break;
case LIMIT:
enterOuterAlt(_localctx, 3);
{
- setState(114);
+ setState(116);
limitCommand();
}
break;
@@ -484,70 +485,70 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce
case PROJECT:
enterOuterAlt(_localctx, 4);
{
- setState(115);
+ setState(117);
keepCommand();
}
break;
case SORT:
enterOuterAlt(_localctx, 5);
{
- setState(116);
+ setState(118);
sortCommand();
}
break;
case STATS:
enterOuterAlt(_localctx, 6);
{
- setState(117);
+ setState(119);
statsCommand();
}
break;
case WHERE:
enterOuterAlt(_localctx, 7);
{
- setState(118);
+ setState(120);
whereCommand();
}
break;
case DROP:
enterOuterAlt(_localctx, 8);
{
- setState(119);
+ setState(121);
dropCommand();
}
break;
case RENAME:
enterOuterAlt(_localctx, 9);
{
- setState(120);
+ setState(122);
renameCommand();
}
break;
case DISSECT:
enterOuterAlt(_localctx, 10);
{
- setState(121);
+ setState(123);
dissectCommand();
}
break;
case GROK:
enterOuterAlt(_localctx, 11);
{
- setState(122);
+ setState(124);
grokCommand();
}
break;
case ENRICH:
enterOuterAlt(_localctx, 12);
{
- setState(123);
+ setState(125);
enrichCommand();
}
break;
case MV_EXPAND:
enterOuterAlt(_localctx, 13);
{
- setState(124);
+ setState(126);
mvExpandCommand();
}
break;
@@ -597,9 +598,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException {
try {
enterOuterAlt(_localctx, 1);
{
- setState(127);
+ setState(129);
match(WHERE);
- setState(128);
+ setState(130);
booleanExpression(0);
}
}
@@ -632,7 +633,7 @@ public static class LogicalNotContext extends BooleanExpressionContext {
public BooleanExpressionContext booleanExpression() {
return getRuleContext(BooleanExpressionContext.class,0);
}
- @SuppressWarnings("this-escape") public LogicalNotContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
+ public LogicalNotContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalNot(this);
@@ -652,7 +653,7 @@ public static class BooleanDefaultContext extends BooleanExpressionContext {
public ValueExpressionContext valueExpression() {
return getRuleContext(ValueExpressionContext.class,0);
}
- @SuppressWarnings("this-escape") public BooleanDefaultContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
+ public BooleanDefaultContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanDefault(this);
@@ -675,7 +676,7 @@ public ValueExpressionContext valueExpression() {
public TerminalNode IS() { return getToken(EsqlBaseParser.IS, 0); }
public TerminalNode NULL() { return getToken(EsqlBaseParser.NULL, 0); }
public TerminalNode NOT() { return getToken(EsqlBaseParser.NOT, 0); }
- @SuppressWarnings("this-escape") public IsNullContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
+ public IsNullContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIsNull(this);
@@ -695,7 +696,7 @@ public static class RegexExpressionContext extends BooleanExpressionContext {
public RegexBooleanExpressionContext regexBooleanExpression() {
return getRuleContext(RegexBooleanExpressionContext.class,0);
}
- @SuppressWarnings("this-escape") public RegexExpressionContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
+ public RegexExpressionContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterRegexExpression(this);
@@ -726,7 +727,7 @@ public ValueExpressionContext valueExpression(int i) {
public TerminalNode COMMA(int i) {
return getToken(EsqlBaseParser.COMMA, i);
}
- @SuppressWarnings("this-escape") public LogicalInContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
+ public LogicalInContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalIn(this);
@@ -754,7 +755,7 @@ public BooleanExpressionContext booleanExpression(int i) {
}
public TerminalNode AND() { return getToken(EsqlBaseParser.AND, 0); }
public TerminalNode OR() { return getToken(EsqlBaseParser.OR, 0); }
- @SuppressWarnings("this-escape") public LogicalBinaryContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
+ public LogicalBinaryContext(BooleanExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterLogicalBinary(this);
@@ -786,7 +787,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(158);
+ setState(160);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) {
case 1:
@@ -795,9 +796,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_ctx = _localctx;
_prevctx = _localctx;
- setState(131);
+ setState(133);
match(NOT);
- setState(132);
+ setState(134);
booleanExpression(7);
}
break;
@@ -806,7 +807,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new BooleanDefaultContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(133);
+ setState(135);
valueExpression();
}
break;
@@ -815,7 +816,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new RegexExpressionContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(134);
+ setState(136);
regexBooleanExpression();
}
break;
@@ -824,41 +825,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new LogicalInContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(135);
- valueExpression();
setState(137);
+ valueExpression();
+ setState(139);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(136);
+ setState(138);
match(NOT);
}
}
- setState(139);
+ setState(141);
match(IN);
- setState(140);
+ setState(142);
match(LP);
- setState(141);
+ setState(143);
valueExpression();
- setState(146);
+ setState(148);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(142);
+ setState(144);
match(COMMA);
- setState(143);
+ setState(145);
valueExpression();
}
}
- setState(148);
+ setState(150);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(149);
+ setState(151);
match(RP);
}
break;
@@ -867,27 +868,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new IsNullContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(151);
+ setState(153);
valueExpression();
- setState(152);
- match(IS);
setState(154);
+ match(IS);
+ setState(156);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(153);
+ setState(155);
match(NOT);
}
}
- setState(156);
+ setState(158);
match(NULL);
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(168);
+ setState(170);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,8,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
@@ -895,7 +896,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(166);
+ setState(168);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) {
case 1:
@@ -903,11 +904,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState));
((LogicalBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression);
- setState(160);
+ setState(162);
if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
- setState(161);
+ setState(163);
((LogicalBinaryContext)_localctx).operator = match(AND);
- setState(162);
+ setState(164);
((LogicalBinaryContext)_localctx).right = booleanExpression(5);
}
break;
@@ -916,18 +917,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState));
((LogicalBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression);
- setState(163);
+ setState(165);
if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)");
- setState(164);
+ setState(166);
((LogicalBinaryContext)_localctx).operator = match(OR);
- setState(165);
+ setState(167);
((LogicalBinaryContext)_localctx).right = booleanExpression(4);
}
break;
}
}
}
- setState(170);
+ setState(172);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,8,_ctx);
}
@@ -981,48 +982,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog
enterRule(_localctx, 12, RULE_regexBooleanExpression);
int _la;
try {
- setState(185);
+ setState(187);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(171);
- valueExpression();
setState(173);
+ valueExpression();
+ setState(175);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(172);
+ setState(174);
match(NOT);
}
}
- setState(175);
+ setState(177);
((RegexBooleanExpressionContext)_localctx).kind = match(LIKE);
- setState(176);
+ setState(178);
((RegexBooleanExpressionContext)_localctx).pattern = string();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(178);
- valueExpression();
setState(180);
+ valueExpression();
+ setState(182);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(179);
+ setState(181);
match(NOT);
}
}
- setState(182);
+ setState(184);
((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE);
- setState(183);
+ setState(185);
((RegexBooleanExpressionContext)_localctx).pattern = string();
}
break;
@@ -1056,7 +1057,7 @@ public static class ValueExpressionDefaultContext extends ValueExpressionContext
public OperatorExpressionContext operatorExpression() {
return getRuleContext(OperatorExpressionContext.class,0);
}
- @SuppressWarnings("this-escape") public ValueExpressionDefaultContext(ValueExpressionContext ctx) { copyFrom(ctx); }
+ public ValueExpressionDefaultContext(ValueExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterValueExpressionDefault(this);
@@ -1084,7 +1085,7 @@ public List operatorExpression() {
public OperatorExpressionContext operatorExpression(int i) {
return getRuleContext(OperatorExpressionContext.class,i);
}
- @SuppressWarnings("this-escape") public ComparisonContext(ValueExpressionContext ctx) { copyFrom(ctx); }
+ public ComparisonContext(ValueExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterComparison(this);
@@ -1104,14 +1105,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio
ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState());
enterRule(_localctx, 14, RULE_valueExpression);
try {
- setState(192);
+ setState(194);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) {
case 1:
_localctx = new ValueExpressionDefaultContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(187);
+ setState(189);
operatorExpression(0);
}
break;
@@ -1119,11 +1120,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio
_localctx = new ComparisonContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(188);
+ setState(190);
((ComparisonContext)_localctx).left = operatorExpression(0);
- setState(189);
+ setState(191);
comparisonOperator();
- setState(190);
+ setState(192);
((ComparisonContext)_localctx).right = operatorExpression(0);
}
break;
@@ -1157,7 +1158,7 @@ public static class OperatorExpressionDefaultContext extends OperatorExpressionC
public PrimaryExpressionContext primaryExpression() {
return getRuleContext(PrimaryExpressionContext.class,0);
}
- @SuppressWarnings("this-escape") public OperatorExpressionDefaultContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
+ public OperatorExpressionDefaultContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterOperatorExpressionDefault(this);
@@ -1188,7 +1189,7 @@ public OperatorExpressionContext operatorExpression(int i) {
public TerminalNode PERCENT() { return getToken(EsqlBaseParser.PERCENT, 0); }
public TerminalNode PLUS() { return getToken(EsqlBaseParser.PLUS, 0); }
public TerminalNode MINUS() { return getToken(EsqlBaseParser.MINUS, 0); }
- @SuppressWarnings("this-escape") public ArithmeticBinaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
+ public ArithmeticBinaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterArithmeticBinary(this);
@@ -1211,7 +1212,7 @@ public OperatorExpressionContext operatorExpression() {
}
public TerminalNode MINUS() { return getToken(EsqlBaseParser.MINUS, 0); }
public TerminalNode PLUS() { return getToken(EsqlBaseParser.PLUS, 0); }
- @SuppressWarnings("this-escape") public ArithmeticUnaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
+ public ArithmeticUnaryContext(OperatorExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterArithmeticUnary(this);
@@ -1243,7 +1244,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(198);
+ setState(200);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) {
case 1:
@@ -1252,7 +1253,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_ctx = _localctx;
_prevctx = _localctx;
- setState(195);
+ setState(197);
primaryExpression();
}
break;
@@ -1261,7 +1262,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_localctx = new ArithmeticUnaryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(196);
+ setState(198);
((ArithmeticUnaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
@@ -1272,13 +1273,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_errHandler.reportMatch(this);
consume();
}
- setState(197);
+ setState(199);
operatorExpression(3);
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(208);
+ setState(210);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
@@ -1286,7 +1287,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(206);
+ setState(208);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
case 1:
@@ -1294,9 +1295,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression);
- setState(200);
+ setState(202);
if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
- setState(201);
+ setState(203);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0) ) {
@@ -1307,7 +1308,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_errHandler.reportMatch(this);
consume();
}
- setState(202);
+ setState(204);
((ArithmeticBinaryContext)_localctx).right = operatorExpression(3);
}
break;
@@ -1316,9 +1317,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression);
- setState(203);
+ setState(205);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(204);
+ setState(206);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
@@ -1329,14 +1330,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE
_errHandler.reportMatch(this);
consume();
}
- setState(205);
+ setState(207);
((ArithmeticBinaryContext)_localctx).right = operatorExpression(2);
}
break;
}
}
}
- setState(210);
+ setState(212);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
}
@@ -1370,7 +1371,7 @@ public static class DereferenceContext extends PrimaryExpressionContext {
public QualifiedNameContext qualifiedName() {
return getRuleContext(QualifiedNameContext.class,0);
}
- @SuppressWarnings("this-escape") public DereferenceContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
+ public DereferenceContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDereference(this);
@@ -1390,7 +1391,7 @@ public static class ConstantDefaultContext extends PrimaryExpressionContext {
public ConstantContext constant() {
return getRuleContext(ConstantContext.class,0);
}
- @SuppressWarnings("this-escape") public ConstantDefaultContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
+ public ConstantDefaultContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterConstantDefault(this);
@@ -1412,7 +1413,7 @@ public BooleanExpressionContext booleanExpression() {
return getRuleContext(BooleanExpressionContext.class,0);
}
public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); }
- @SuppressWarnings("this-escape") public ParenthesizedExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
+ public ParenthesizedExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterParenthesizedExpression(this);
@@ -1428,34 +1429,22 @@ public T accept(ParseTreeVisitor extends T> visitor) {
}
}
@SuppressWarnings("CheckReturnValue")
- public static class FunctionExpressionContext extends PrimaryExpressionContext {
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
- }
- public TerminalNode LP() { return getToken(EsqlBaseParser.LP, 0); }
- public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); }
- public List booleanExpression() {
- return getRuleContexts(BooleanExpressionContext.class);
+ public static class FunctionContext extends PrimaryExpressionContext {
+ public FunctionExpressionContext functionExpression() {
+ return getRuleContext(FunctionExpressionContext.class,0);
}
- public BooleanExpressionContext booleanExpression(int i) {
- return getRuleContext(BooleanExpressionContext.class,i);
- }
- public List COMMA() { return getTokens(EsqlBaseParser.COMMA); }
- public TerminalNode COMMA(int i) {
- return getToken(EsqlBaseParser.COMMA, i);
- }
- @SuppressWarnings("this-escape") public FunctionExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
+ public FunctionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunctionExpression(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunction(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunctionExpression(this);
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunction(this);
}
@Override
public T accept(ParseTreeVisitor extends T> visitor) {
- if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor extends T>)visitor).visitFunctionExpression(this);
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor extends T>)visitor).visitFunction(this);
else return visitor.visitChildren(this);
}
}
@@ -1463,16 +1452,15 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final PrimaryExpressionContext primaryExpression() throws RecognitionException {
PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState());
enterRule(_localctx, 18, RULE_primaryExpression);
- int _la;
try {
- setState(231);
+ setState(220);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) {
case 1:
_localctx = new ConstantDefaultContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(211);
+ setState(213);
constant();
}
break;
@@ -1480,60 +1468,144 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce
_localctx = new DereferenceContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(212);
+ setState(214);
qualifiedName();
}
break;
case 3:
- _localctx = new ParenthesizedExpressionContext(_localctx);
+ _localctx = new FunctionContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(213);
- match(LP);
- setState(214);
- booleanExpression(0);
setState(215);
- match(RP);
+ functionExpression();
}
break;
case 4:
- _localctx = new FunctionExpressionContext(_localctx);
+ _localctx = new ParenthesizedExpressionContext(_localctx);
enterOuterAlt(_localctx, 4);
{
+ setState(216);
+ match(LP);
setState(217);
- identifier();
+ booleanExpression(0);
setState(218);
- match(LP);
- setState(227);
+ match(RP);
+ }
+ break;
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ @SuppressWarnings("CheckReturnValue")
+ public static class FunctionExpressionContext extends ParserRuleContext {
+ public IdentifierContext identifier() {
+ return getRuleContext(IdentifierContext.class,0);
+ }
+ public TerminalNode LP() { return getToken(EsqlBaseParser.LP, 0); }
+ public TerminalNode RP() { return getToken(EsqlBaseParser.RP, 0); }
+ public TerminalNode ASTERISK() { return getToken(EsqlBaseParser.ASTERISK, 0); }
+ public List booleanExpression() {
+ return getRuleContexts(BooleanExpressionContext.class);
+ }
+ public BooleanExpressionContext booleanExpression(int i) {
+ return getRuleContext(BooleanExpressionContext.class,i);
+ }
+ public List COMMA() { return getTokens(EsqlBaseParser.COMMA); }
+ public TerminalNode COMMA(int i) {
+ return getToken(EsqlBaseParser.COMMA, i);
+ }
+ public FunctionExpressionContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_functionExpression; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFunctionExpression(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFunctionExpression(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor extends T>)visitor).visitFunctionExpression(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final FunctionExpressionContext functionExpression() throws RecognitionException {
+ FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState());
+ enterRule(_localctx, 20, RULE_functionExpression);
+ int _la;
+ try {
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(222);
+ identifier();
+ setState(223);
+ match(LP);
+ setState(233);
+ _errHandler.sync(this);
+ switch (_input.LA(1)) {
+ case ASTERISK:
+ {
+ setState(224);
+ match(ASTERISK);
+ }
+ break;
+ case STRING:
+ case INTEGER_LITERAL:
+ case DECIMAL_LITERAL:
+ case FALSE:
+ case LP:
+ case NOT:
+ case NULL:
+ case PARAM:
+ case TRUE:
+ case PLUS:
+ case MINUS:
+ case OPENING_BRACKET:
+ case UNQUOTED_IDENTIFIER:
+ case QUOTED_IDENTIFIER:
+ {
+ {
+ setState(225);
+ booleanExpression(0);
+ setState(230);
_errHandler.sync(this);
_la = _input.LA(1);
- if ((((_la - 27)) & ~0x3f) == 0 && ((1L << (_la - 27)) & 3599201870855L) != 0) {
+ while (_la==COMMA) {
+ {
{
- setState(219);
+ setState(226);
+ match(COMMA);
+ setState(227);
booleanExpression(0);
- setState(224);
- _errHandler.sync(this);
- _la = _input.LA(1);
- while (_la==COMMA) {
- {
- {
- setState(220);
- match(COMMA);
- setState(221);
- booleanExpression(0);
- }
- }
- setState(226);
- _errHandler.sync(this);
- _la = _input.LA(1);
}
}
+ setState(232);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ }
}
-
- setState(229);
- match(RP);
}
break;
+ case RP:
+ break;
+ default:
+ break;
+ }
+ setState(235);
+ match(RP);
}
}
catch (RecognitionException re) {
@@ -1574,13 +1646,13 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final RowCommandContext rowCommand() throws RecognitionException {
RowCommandContext _localctx = new RowCommandContext(_ctx, getState());
- enterRule(_localctx, 20, RULE_rowCommand);
+ enterRule(_localctx, 22, RULE_rowCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(233);
+ setState(237);
match(ROW);
- setState(234);
+ setState(238);
fields();
}
}
@@ -1628,28 +1700,28 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final FieldsContext fields() throws RecognitionException {
FieldsContext _localctx = new FieldsContext(_ctx, getState());
- enterRule(_localctx, 22, RULE_fields);
+ enterRule(_localctx, 24, RULE_fields);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(236);
+ setState(240);
field();
- setState(241);
+ setState(245);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,19,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(237);
+ setState(241);
match(COMMA);
- setState(238);
+ setState(242);
field();
}
}
}
- setState(243);
+ setState(247);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,19,_ctx);
}
@@ -1696,26 +1768,26 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final FieldContext field() throws RecognitionException {
FieldContext _localctx = new FieldContext(_ctx, getState());
- enterRule(_localctx, 24, RULE_field);
+ enterRule(_localctx, 26, RULE_field);
try {
- setState(249);
+ setState(253);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(244);
+ setState(248);
booleanExpression(0);
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(245);
+ setState(249);
qualifiedName();
- setState(246);
+ setState(250);
match(ASSIGN);
- setState(247);
+ setState(251);
booleanExpression(0);
}
break;
@@ -1769,39 +1841,39 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final FromCommandContext fromCommand() throws RecognitionException {
FromCommandContext _localctx = new FromCommandContext(_ctx, getState());
- enterRule(_localctx, 26, RULE_fromCommand);
+ enterRule(_localctx, 28, RULE_fromCommand);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(251);
+ setState(255);
match(FROM);
- setState(252);
+ setState(256);
sourceIdentifier();
- setState(257);
+ setState(261);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,21,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(253);
+ setState(257);
match(COMMA);
- setState(254);
+ setState(258);
sourceIdentifier();
}
}
}
- setState(259);
+ setState(263);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,21,_ctx);
}
- setState(261);
+ setState(265);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) {
case 1:
{
- setState(260);
+ setState(264);
metadata();
}
break;
@@ -1855,34 +1927,34 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final MetadataContext metadata() throws RecognitionException {
MetadataContext _localctx = new MetadataContext(_ctx, getState());
- enterRule(_localctx, 28, RULE_metadata);
+ enterRule(_localctx, 30, RULE_metadata);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(263);
+ setState(267);
match(OPENING_BRACKET);
- setState(264);
+ setState(268);
match(METADATA);
- setState(265);
+ setState(269);
sourceIdentifier();
- setState(270);
+ setState(274);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(266);
+ setState(270);
match(COMMA);
- setState(267);
+ setState(271);
sourceIdentifier();
}
}
- setState(272);
+ setState(276);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(273);
+ setState(277);
match(CLOSING_BRACKET);
}
}
@@ -1924,13 +1996,13 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final EvalCommandContext evalCommand() throws RecognitionException {
EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState());
- enterRule(_localctx, 30, RULE_evalCommand);
+ enterRule(_localctx, 32, RULE_evalCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(275);
+ setState(279);
match(EVAL);
- setState(276);
+ setState(280);
fields();
}
}
@@ -1976,30 +2048,30 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final StatsCommandContext statsCommand() throws RecognitionException {
StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState());
- enterRule(_localctx, 32, RULE_statsCommand);
+ enterRule(_localctx, 34, RULE_statsCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(278);
+ setState(282);
match(STATS);
- setState(280);
+ setState(284);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) {
case 1:
{
- setState(279);
+ setState(283);
fields();
}
break;
}
- setState(284);
+ setState(288);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) {
case 1:
{
- setState(282);
+ setState(286);
match(BY);
- setState(283);
+ setState(287);
grouping();
}
break;
@@ -2048,22 +2120,22 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException {
InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState());
- enterRule(_localctx, 34, RULE_inlinestatsCommand);
+ enterRule(_localctx, 36, RULE_inlinestatsCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(286);
+ setState(290);
match(INLINESTATS);
- setState(287);
+ setState(291);
fields();
- setState(290);
+ setState(294);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) {
case 1:
{
- setState(288);
+ setState(292);
match(BY);
- setState(289);
+ setState(293);
grouping();
}
break;
@@ -2114,28 +2186,28 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final GroupingContext grouping() throws RecognitionException {
GroupingContext _localctx = new GroupingContext(_ctx, getState());
- enterRule(_localctx, 36, RULE_grouping);
+ enterRule(_localctx, 38, RULE_grouping);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(292);
+ setState(296);
qualifiedName();
- setState(297);
+ setState(301);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,27,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(293);
+ setState(297);
match(COMMA);
- setState(294);
+ setState(298);
qualifiedName();
}
}
}
- setState(299);
+ setState(303);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,27,_ctx);
}
@@ -2177,12 +2249,12 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final SourceIdentifierContext sourceIdentifier() throws RecognitionException {
SourceIdentifierContext _localctx = new SourceIdentifierContext(_ctx, getState());
- enterRule(_localctx, 38, RULE_sourceIdentifier);
+ enterRule(_localctx, 40, RULE_sourceIdentifier);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(300);
+ setState(304);
_la = _input.LA(1);
if ( !(_la==SRC_UNQUOTED_IDENTIFIER || _la==SRC_QUOTED_IDENTIFIER) ) {
_errHandler.recoverInline(this);
@@ -2238,28 +2310,28 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final QualifiedNameContext qualifiedName() throws RecognitionException {
QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState());
- enterRule(_localctx, 40, RULE_qualifiedName);
+ enterRule(_localctx, 42, RULE_qualifiedName);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(302);
+ setState(306);
identifier();
- setState(307);
+ setState(311);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,28,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(303);
+ setState(307);
match(DOT);
- setState(304);
+ setState(308);
identifier();
}
}
}
- setState(309);
+ setState(313);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,28,_ctx);
}
@@ -2301,12 +2373,12 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final IdentifierContext identifier() throws RecognitionException {
IdentifierContext _localctx = new IdentifierContext(_ctx, getState());
- enterRule(_localctx, 42, RULE_identifier);
+ enterRule(_localctx, 44, RULE_identifier);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(310);
+ setState(314);
_la = _input.LA(1);
if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) {
_errHandler.recoverInline(this);
@@ -2355,7 +2427,7 @@ public BooleanValueContext booleanValue(int i) {
public TerminalNode COMMA(int i) {
return getToken(EsqlBaseParser.COMMA, i);
}
- @SuppressWarnings("this-escape") public BooleanArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public BooleanArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanArrayLiteral(this);
@@ -2375,7 +2447,7 @@ public static class DecimalLiteralContext extends ConstantContext {
public DecimalValueContext decimalValue() {
return getRuleContext(DecimalValueContext.class,0);
}
- @SuppressWarnings("this-escape") public DecimalLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public DecimalLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterDecimalLiteral(this);
@@ -2393,7 +2465,7 @@ public T accept(ParseTreeVisitor extends T> visitor) {
@SuppressWarnings("CheckReturnValue")
public static class NullLiteralContext extends ConstantContext {
public TerminalNode NULL() { return getToken(EsqlBaseParser.NULL, 0); }
- @SuppressWarnings("this-escape") public NullLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public NullLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterNullLiteral(this);
@@ -2414,7 +2486,7 @@ public IntegerValueContext integerValue() {
return getRuleContext(IntegerValueContext.class,0);
}
public TerminalNode UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.UNQUOTED_IDENTIFIER, 0); }
- @SuppressWarnings("this-escape") public QualifiedIntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public QualifiedIntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterQualifiedIntegerLiteral(this);
@@ -2443,7 +2515,7 @@ public StringContext string(int i) {
public TerminalNode COMMA(int i) {
return getToken(EsqlBaseParser.COMMA, i);
}
- @SuppressWarnings("this-escape") public StringArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public StringArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterStringArrayLiteral(this);
@@ -2463,7 +2535,7 @@ public static class StringLiteralContext extends ConstantContext {
public StringContext string() {
return getRuleContext(StringContext.class,0);
}
- @SuppressWarnings("this-escape") public StringLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public StringLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterStringLiteral(this);
@@ -2492,7 +2564,7 @@ public NumericValueContext numericValue(int i) {
public TerminalNode COMMA(int i) {
return getToken(EsqlBaseParser.COMMA, i);
}
- @SuppressWarnings("this-escape") public NumericArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public NumericArrayLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterNumericArrayLiteral(this);
@@ -2510,7 +2582,7 @@ public T accept(ParseTreeVisitor extends T> visitor) {
@SuppressWarnings("CheckReturnValue")
public static class InputParamContext extends ConstantContext {
public TerminalNode PARAM() { return getToken(EsqlBaseParser.PARAM, 0); }
- @SuppressWarnings("this-escape") public InputParamContext(ConstantContext ctx) { copyFrom(ctx); }
+ public InputParamContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterInputParam(this);
@@ -2530,7 +2602,7 @@ public static class IntegerLiteralContext extends ConstantContext {
public IntegerValueContext integerValue() {
return getRuleContext(IntegerValueContext.class,0);
}
- @SuppressWarnings("this-escape") public IntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public IntegerLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIntegerLiteral(this);
@@ -2550,7 +2622,7 @@ public static class BooleanLiteralContext extends ConstantContext {
public BooleanValueContext booleanValue() {
return getRuleContext(BooleanValueContext.class,0);
}
- @SuppressWarnings("this-escape") public BooleanLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ public BooleanLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterBooleanLiteral(this);
@@ -2568,17 +2640,17 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final ConstantContext constant() throws RecognitionException {
ConstantContext _localctx = new ConstantContext(_ctx, getState());
- enterRule(_localctx, 44, RULE_constant);
+ enterRule(_localctx, 46, RULE_constant);
int _la;
try {
- setState(354);
+ setState(358);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) {
case 1:
_localctx = new NullLiteralContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(312);
+ setState(316);
match(NULL);
}
break;
@@ -2586,9 +2658,9 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new QualifiedIntegerLiteralContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(313);
+ setState(317);
integerValue();
- setState(314);
+ setState(318);
match(UNQUOTED_IDENTIFIER);
}
break;
@@ -2596,7 +2668,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new DecimalLiteralContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(316);
+ setState(320);
decimalValue();
}
break;
@@ -2604,7 +2676,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new IntegerLiteralContext(_localctx);
enterOuterAlt(_localctx, 4);
{
- setState(317);
+ setState(321);
integerValue();
}
break;
@@ -2612,7 +2684,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new BooleanLiteralContext(_localctx);
enterOuterAlt(_localctx, 5);
{
- setState(318);
+ setState(322);
booleanValue();
}
break;
@@ -2620,7 +2692,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new InputParamContext(_localctx);
enterOuterAlt(_localctx, 6);
{
- setState(319);
+ setState(323);
match(PARAM);
}
break;
@@ -2628,7 +2700,7 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new StringLiteralContext(_localctx);
enterOuterAlt(_localctx, 7);
{
- setState(320);
+ setState(324);
string();
}
break;
@@ -2636,27 +2708,27 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new NumericArrayLiteralContext(_localctx);
enterOuterAlt(_localctx, 8);
{
- setState(321);
+ setState(325);
match(OPENING_BRACKET);
- setState(322);
+ setState(326);
numericValue();
- setState(327);
+ setState(331);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(323);
+ setState(327);
match(COMMA);
- setState(324);
+ setState(328);
numericValue();
}
}
- setState(329);
+ setState(333);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(330);
+ setState(334);
match(CLOSING_BRACKET);
}
break;
@@ -2664,27 +2736,27 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new BooleanArrayLiteralContext(_localctx);
enterOuterAlt(_localctx, 9);
{
- setState(332);
+ setState(336);
match(OPENING_BRACKET);
- setState(333);
+ setState(337);
booleanValue();
- setState(338);
+ setState(342);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(334);
+ setState(338);
match(COMMA);
- setState(335);
+ setState(339);
booleanValue();
}
}
- setState(340);
+ setState(344);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(341);
+ setState(345);
match(CLOSING_BRACKET);
}
break;
@@ -2692,27 +2764,27 @@ public final ConstantContext constant() throws RecognitionException {
_localctx = new StringArrayLiteralContext(_localctx);
enterOuterAlt(_localctx, 10);
{
- setState(343);
+ setState(347);
match(OPENING_BRACKET);
- setState(344);
+ setState(348);
string();
- setState(349);
+ setState(353);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==COMMA) {
{
{
- setState(345);
+ setState(349);
match(COMMA);
- setState(346);
+ setState(350);
string();
}
}
- setState(351);
+ setState(355);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(352);
+ setState(356);
match(CLOSING_BRACKET);
}
break;
@@ -2754,13 +2826,13 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final LimitCommandContext limitCommand() throws RecognitionException {
LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState());
- enterRule(_localctx, 46, RULE_limitCommand);
+ enterRule(_localctx, 48, RULE_limitCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(356);
+ setState(360);
match(LIMIT);
- setState(357);
+ setState(361);
match(INTEGER_LITERAL);
}
}
@@ -2809,30 +2881,30 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final SortCommandContext sortCommand() throws RecognitionException {
SortCommandContext _localctx = new SortCommandContext(_ctx, getState());
- enterRule(_localctx, 48, RULE_sortCommand);
+ enterRule(_localctx, 50, RULE_sortCommand);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(359);
+ setState(363);
match(SORT);
- setState(360);
+ setState(364);
orderExpression();
- setState(365);
+ setState(369);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,33,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(361);
+ setState(365);
match(COMMA);
- setState(362);
+ setState(366);
orderExpression();
}
}
}
- setState(367);
+ setState(371);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,33,_ctx);
}
@@ -2882,19 +2954,19 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final OrderExpressionContext orderExpression() throws RecognitionException {
OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState());
- enterRule(_localctx, 50, RULE_orderExpression);
+ enterRule(_localctx, 52, RULE_orderExpression);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(368);
+ setState(372);
booleanExpression(0);
- setState(370);
+ setState(374);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) {
case 1:
{
- setState(369);
+ setState(373);
((OrderExpressionContext)_localctx).ordering = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==ASC || _la==DESC) ) {
@@ -2908,14 +2980,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio
}
break;
}
- setState(374);
+ setState(378);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) {
case 1:
{
- setState(372);
+ setState(376);
match(NULLS);
- setState(373);
+ setState(377);
((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==FIRST || _la==LAST) ) {
@@ -2977,34 +3049,34 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final KeepCommandContext keepCommand() throws RecognitionException {
KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState());
- enterRule(_localctx, 52, RULE_keepCommand);
+ enterRule(_localctx, 54, RULE_keepCommand);
try {
int _alt;
- setState(394);
+ setState(398);
_errHandler.sync(this);
switch (_input.LA(1)) {
case KEEP:
enterOuterAlt(_localctx, 1);
{
- setState(376);
+ setState(380);
match(KEEP);
- setState(377);
+ setState(381);
sourceIdentifier();
- setState(382);
+ setState(386);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,36,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(378);
+ setState(382);
match(COMMA);
- setState(379);
+ setState(383);
sourceIdentifier();
}
}
}
- setState(384);
+ setState(388);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,36,_ctx);
}
@@ -3013,25 +3085,25 @@ public final KeepCommandContext keepCommand() throws RecognitionException {
case PROJECT:
enterOuterAlt(_localctx, 2);
{
- setState(385);
+ setState(389);
match(PROJECT);
- setState(386);
+ setState(390);
sourceIdentifier();
- setState(391);
+ setState(395);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,37,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(387);
+ setState(391);
match(COMMA);
- setState(388);
+ setState(392);
sourceIdentifier();
}
}
}
- setState(393);
+ setState(397);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,37,_ctx);
}
@@ -3086,30 +3158,30 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final DropCommandContext dropCommand() throws RecognitionException {
DropCommandContext _localctx = new DropCommandContext(_ctx, getState());
- enterRule(_localctx, 54, RULE_dropCommand);
+ enterRule(_localctx, 56, RULE_dropCommand);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(396);
+ setState(400);
match(DROP);
- setState(397);
+ setState(401);
sourceIdentifier();
- setState(402);
+ setState(406);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,39,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(398);
+ setState(402);
match(COMMA);
- setState(399);
+ setState(403);
sourceIdentifier();
}
}
}
- setState(404);
+ setState(408);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,39,_ctx);
}
@@ -3160,30 +3232,30 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final RenameCommandContext renameCommand() throws RecognitionException {
RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState());
- enterRule(_localctx, 56, RULE_renameCommand);
+ enterRule(_localctx, 58, RULE_renameCommand);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(405);
+ setState(409);
match(RENAME);
- setState(406);
+ setState(410);
renameClause();
- setState(411);
+ setState(415);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,40,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(407);
+ setState(411);
match(COMMA);
- setState(408);
+ setState(412);
renameClause();
}
}
}
- setState(413);
+ setState(417);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,40,_ctx);
}
@@ -3232,15 +3304,15 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final RenameClauseContext renameClause() throws RecognitionException {
RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState());
- enterRule(_localctx, 58, RULE_renameClause);
+ enterRule(_localctx, 60, RULE_renameClause);
try {
enterOuterAlt(_localctx, 1);
{
- setState(414);
+ setState(418);
((RenameClauseContext)_localctx).oldName = sourceIdentifier();
- setState(415);
+ setState(419);
match(AS);
- setState(416);
+ setState(420);
((RenameClauseContext)_localctx).newName = sourceIdentifier();
}
}
@@ -3288,22 +3360,22 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final DissectCommandContext dissectCommand() throws RecognitionException {
DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState());
- enterRule(_localctx, 60, RULE_dissectCommand);
+ enterRule(_localctx, 62, RULE_dissectCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(418);
+ setState(422);
match(DISSECT);
- setState(419);
+ setState(423);
primaryExpression();
- setState(420);
+ setState(424);
string();
- setState(422);
+ setState(426);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) {
case 1:
{
- setState(421);
+ setState(425);
commandOptions();
}
break;
@@ -3351,15 +3423,15 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final GrokCommandContext grokCommand() throws RecognitionException {
GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState());
- enterRule(_localctx, 62, RULE_grokCommand);
+ enterRule(_localctx, 64, RULE_grokCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(424);
+ setState(428);
match(GROK);
- setState(425);
+ setState(429);
primaryExpression();
- setState(426);
+ setState(430);
string();
}
}
@@ -3401,13 +3473,13 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final MvExpandCommandContext mvExpandCommand() throws RecognitionException {
MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState());
- enterRule(_localctx, 64, RULE_mvExpandCommand);
+ enterRule(_localctx, 66, RULE_mvExpandCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(428);
+ setState(432);
match(MV_EXPAND);
- setState(429);
+ setState(433);
sourceIdentifier();
}
}
@@ -3455,28 +3527,28 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final CommandOptionsContext commandOptions() throws RecognitionException {
CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState());
- enterRule(_localctx, 66, RULE_commandOptions);
+ enterRule(_localctx, 68, RULE_commandOptions);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(431);
+ setState(435);
commandOption();
- setState(436);
+ setState(440);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,42,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(432);
+ setState(436);
match(COMMA);
- setState(433);
+ setState(437);
commandOption();
}
}
}
- setState(438);
+ setState(442);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,42,_ctx);
}
@@ -3523,15 +3595,15 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final CommandOptionContext commandOption() throws RecognitionException {
CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState());
- enterRule(_localctx, 68, RULE_commandOption);
+ enterRule(_localctx, 70, RULE_commandOption);
try {
enterOuterAlt(_localctx, 1);
{
- setState(439);
+ setState(443);
identifier();
- setState(440);
+ setState(444);
match(ASSIGN);
- setState(441);
+ setState(445);
constant();
}
}
@@ -3571,12 +3643,12 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final BooleanValueContext booleanValue() throws RecognitionException {
BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState());
- enterRule(_localctx, 70, RULE_booleanValue);
+ enterRule(_localctx, 72, RULE_booleanValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(443);
+ setState(447);
_la = _input.LA(1);
if ( !(_la==FALSE || _la==TRUE) ) {
_errHandler.recoverInline(this);
@@ -3628,22 +3700,22 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final NumericValueContext numericValue() throws RecognitionException {
NumericValueContext _localctx = new NumericValueContext(_ctx, getState());
- enterRule(_localctx, 72, RULE_numericValue);
+ enterRule(_localctx, 74, RULE_numericValue);
try {
- setState(447);
+ setState(451);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(445);
+ setState(449);
decimalValue();
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(446);
+ setState(450);
integerValue();
}
break;
@@ -3686,17 +3758,17 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final DecimalValueContext decimalValue() throws RecognitionException {
DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState());
- enterRule(_localctx, 74, RULE_decimalValue);
+ enterRule(_localctx, 76, RULE_decimalValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(450);
+ setState(454);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==PLUS || _la==MINUS) {
{
- setState(449);
+ setState(453);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
_errHandler.recoverInline(this);
@@ -3709,7 +3781,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException {
}
}
- setState(452);
+ setState(456);
match(DECIMAL_LITERAL);
}
}
@@ -3750,17 +3822,17 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final IntegerValueContext integerValue() throws RecognitionException {
IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState());
- enterRule(_localctx, 76, RULE_integerValue);
+ enterRule(_localctx, 78, RULE_integerValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(455);
+ setState(459);
_errHandler.sync(this);
_la = _input.LA(1);
if (_la==PLUS || _la==MINUS) {
{
- setState(454);
+ setState(458);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
_errHandler.recoverInline(this);
@@ -3773,7 +3845,7 @@ public final IntegerValueContext integerValue() throws RecognitionException {
}
}
- setState(457);
+ setState(461);
match(INTEGER_LITERAL);
}
}
@@ -3812,11 +3884,11 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final StringContext string() throws RecognitionException {
StringContext _localctx = new StringContext(_ctx, getState());
- enterRule(_localctx, 78, RULE_string);
+ enterRule(_localctx, 80, RULE_string);
try {
enterOuterAlt(_localctx, 1);
{
- setState(459);
+ setState(463);
match(STRING);
}
}
@@ -3860,12 +3932,12 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final ComparisonOperatorContext comparisonOperator() throws RecognitionException {
ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState());
- enterRule(_localctx, 80, RULE_comparisonOperator);
+ enterRule(_localctx, 82, RULE_comparisonOperator);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(461);
+ setState(465);
_la = _input.LA(1);
if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 1134907106097364992L) != 0) ) {
_errHandler.recoverInline(this);
@@ -3915,13 +3987,13 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final ExplainCommandContext explainCommand() throws RecognitionException {
ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState());
- enterRule(_localctx, 82, RULE_explainCommand);
+ enterRule(_localctx, 84, RULE_explainCommand);
try {
enterOuterAlt(_localctx, 1);
{
- setState(463);
+ setState(467);
match(EXPLAIN);
- setState(464);
+ setState(468);
subqueryExpression();
}
}
@@ -3964,15 +4036,15 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final SubqueryExpressionContext subqueryExpression() throws RecognitionException {
SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState());
- enterRule(_localctx, 84, RULE_subqueryExpression);
+ enterRule(_localctx, 86, RULE_subqueryExpression);
try {
enterOuterAlt(_localctx, 1);
{
- setState(466);
+ setState(470);
match(OPENING_BRACKET);
- setState(467);
+ setState(471);
query(0);
- setState(468);
+ setState(472);
match(CLOSING_BRACKET);
}
}
@@ -4003,7 +4075,7 @@ public void copyFrom(ShowCommandContext ctx) {
public static class ShowInfoContext extends ShowCommandContext {
public TerminalNode SHOW() { return getToken(EsqlBaseParser.SHOW, 0); }
public TerminalNode INFO() { return getToken(EsqlBaseParser.INFO, 0); }
- @SuppressWarnings("this-escape") public ShowInfoContext(ShowCommandContext ctx) { copyFrom(ctx); }
+ public ShowInfoContext(ShowCommandContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterShowInfo(this);
@@ -4022,7 +4094,7 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public static class ShowFunctionsContext extends ShowCommandContext {
public TerminalNode SHOW() { return getToken(EsqlBaseParser.SHOW, 0); }
public TerminalNode FUNCTIONS() { return getToken(EsqlBaseParser.FUNCTIONS, 0); }
- @SuppressWarnings("this-escape") public ShowFunctionsContext(ShowCommandContext ctx) { copyFrom(ctx); }
+ public ShowFunctionsContext(ShowCommandContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterShowFunctions(this);
@@ -4040,18 +4112,18 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final ShowCommandContext showCommand() throws RecognitionException {
ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState());
- enterRule(_localctx, 86, RULE_showCommand);
+ enterRule(_localctx, 88, RULE_showCommand);
try {
- setState(474);
+ setState(478);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) {
case 1:
_localctx = new ShowInfoContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(470);
+ setState(474);
match(SHOW);
- setState(471);
+ setState(475);
match(INFO);
}
break;
@@ -4059,9 +4131,9 @@ public final ShowCommandContext showCommand() throws RecognitionException {
_localctx = new ShowFunctionsContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(472);
+ setState(476);
match(SHOW);
- setState(473);
+ setState(477);
match(FUNCTIONS);
}
break;
@@ -4122,51 +4194,51 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final EnrichCommandContext enrichCommand() throws RecognitionException {
EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState());
- enterRule(_localctx, 88, RULE_enrichCommand);
+ enterRule(_localctx, 90, RULE_enrichCommand);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(476);
+ setState(480);
match(ENRICH);
- setState(477);
+ setState(481);
((EnrichCommandContext)_localctx).policyName = sourceIdentifier();
- setState(480);
+ setState(484);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) {
case 1:
{
- setState(478);
+ setState(482);
match(ON);
- setState(479);
+ setState(483);
((EnrichCommandContext)_localctx).matchField = sourceIdentifier();
}
break;
}
- setState(491);
+ setState(495);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) {
case 1:
{
- setState(482);
+ setState(486);
match(WITH);
- setState(483);
+ setState(487);
enrichWithClause();
- setState(488);
+ setState(492);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,48,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(484);
+ setState(488);
match(COMMA);
- setState(485);
+ setState(489);
enrichWithClause();
}
}
}
- setState(490);
+ setState(494);
_errHandler.sync(this);
_alt = getInterpreter().adaptivePredict(_input,48,_ctx);
}
@@ -4218,23 +4290,23 @@ public T accept(ParseTreeVisitor extends T> visitor) {
public final EnrichWithClauseContext enrichWithClause() throws RecognitionException {
EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState());
- enterRule(_localctx, 90, RULE_enrichWithClause);
+ enterRule(_localctx, 92, RULE_enrichWithClause);
try {
enterOuterAlt(_localctx, 1);
{
- setState(496);
+ setState(500);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) {
case 1:
{
- setState(493);
+ setState(497);
((EnrichWithClauseContext)_localctx).newName = sourceIdentifier();
- setState(494);
+ setState(498);
match(ASSIGN);
}
break;
}
- setState(498);
+ setState(502);
((EnrichWithClauseContext)_localctx).enrichField = sourceIdentifier();
}
}
@@ -4287,7 +4359,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx,
}
public static final String _serializedATN =
- "\u0004\u0001Q\u01f5\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+
+ "\u0004\u0001Q\u01f9\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+
"\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+
"\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+
"\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+
@@ -4300,316 +4372,319 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx,
"\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+
"#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+
"(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+
- "-\u0007-\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+
- "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001f\b\u0001\n\u0001"+
- "\f\u0001i\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003"+
- "\u0002o\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+
- "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+
- "\u0003\u0001\u0003\u0001\u0003\u0003\u0003~\b\u0003\u0001\u0004\u0001"+
- "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+
- "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u008a\b\u0005\u0001\u0005\u0001"+
- "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u0091\b\u0005\n"+
- "\u0005\f\u0005\u0094\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+
- "\u0005\u0001\u0005\u0003\u0005\u009b\b\u0005\u0001\u0005\u0001\u0005\u0003"+
- "\u0005\u009f\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+
- "\u0005\u0001\u0005\u0005\u0005\u00a7\b\u0005\n\u0005\f\u0005\u00aa\t\u0005"+
- "\u0001\u0006\u0001\u0006\u0003\u0006\u00ae\b\u0006\u0001\u0006\u0001\u0006"+
- "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00b5\b\u0006\u0001\u0006"+
- "\u0001\u0006\u0001\u0006\u0003\u0006\u00ba\b\u0006\u0001\u0007\u0001\u0007"+
- "\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00c1\b\u0007\u0001\b"+
- "\u0001\b\u0001\b\u0001\b\u0003\b\u00c7\b\b\u0001\b\u0001\b\u0001\b\u0001"+
- "\b\u0001\b\u0001\b\u0005\b\u00cf\b\b\n\b\f\b\u00d2\t\b\u0001\t\u0001\t"+
- "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+
- "\t\u0005\t\u00df\b\t\n\t\f\t\u00e2\t\t\u0003\t\u00e4\b\t\u0001\t\u0001"+
- "\t\u0003\t\u00e8\b\t\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+
- "\u000b\u0005\u000b\u00f0\b\u000b\n\u000b\f\u000b\u00f3\t\u000b\u0001\f"+
- "\u0001\f\u0001\f\u0001\f\u0001\f\u0003\f\u00fa\b\f\u0001\r\u0001\r\u0001"+
- "\r\u0001\r\u0005\r\u0100\b\r\n\r\f\r\u0103\t\r\u0001\r\u0003\r\u0106\b"+
- "\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e"+
- "\u010d\b\u000e\n\u000e\f\u000e\u0110\t\u000e\u0001\u000e\u0001\u000e\u0001"+
- "\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0003\u0010\u0119"+
- "\b\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u011d\b\u0010\u0001\u0011"+
- "\u0001\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u0123\b\u0011\u0001\u0012"+
- "\u0001\u0012\u0001\u0012\u0005\u0012\u0128\b\u0012\n\u0012\f\u0012\u012b"+
- "\t\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0005"+
- "\u0014\u0132\b\u0014\n\u0014\f\u0014\u0135\t\u0014\u0001\u0015\u0001\u0015"+
- "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+
- "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+
- "\u0001\u0016\u0005\u0016\u0146\b\u0016\n\u0016\f\u0016\u0149\t\u0016\u0001"+
- "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005"+
- "\u0016\u0151\b\u0016\n\u0016\f\u0016\u0154\t\u0016\u0001\u0016\u0001\u0016"+
- "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015c\b\u0016"+
- "\n\u0016\f\u0016\u015f\t\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u0163"+
- "\b\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001"+
- "\u0018\u0001\u0018\u0005\u0018\u016c\b\u0018\n\u0018\f\u0018\u016f\t\u0018"+
- "\u0001\u0019\u0001\u0019\u0003\u0019\u0173\b\u0019\u0001\u0019\u0001\u0019"+
- "\u0003\u0019\u0177\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+
- "\u0005\u001a\u017d\b\u001a\n\u001a\f\u001a\u0180\t\u001a\u0001\u001a\u0001"+
- "\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0186\b\u001a\n\u001a\f\u001a"+
- "\u0189\t\u001a\u0003\u001a\u018b\b\u001a\u0001\u001b\u0001\u001b\u0001"+
- "\u001b\u0001\u001b\u0005\u001b\u0191\b\u001b\n\u001b\f\u001b\u0194\t\u001b"+
- "\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u019a\b\u001c"+
- "\n\u001c\f\u001c\u019d\t\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+
- "\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0003\u001e\u01a7"+
- "\b\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001"+
- " \u0001 \u0001!\u0001!\u0001!\u0005!\u01b3\b!\n!\f!\u01b6\t!\u0001\"\u0001"+
- "\"\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0003$\u01c0\b$\u0001%"+
- "\u0003%\u01c3\b%\u0001%\u0001%\u0001&\u0003&\u01c8\b&\u0001&\u0001&\u0001"+
- "\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+
- "*\u0001+\u0001+\u0001+\u0001+\u0003+\u01db\b+\u0001,\u0001,\u0001,\u0001"+
- ",\u0003,\u01e1\b,\u0001,\u0001,\u0001,\u0001,\u0005,\u01e7\b,\n,\f,\u01ea"+
- "\t,\u0003,\u01ec\b,\u0001-\u0001-\u0001-\u0003-\u01f1\b-\u0001-\u0001"+
- "-\u0001-\u0000\u0003\u0002\n\u0010.\u0000\u0002\u0004\u0006\b\n\f\u000e"+
- "\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDF"+
- "HJLNPRTVXZ\u0000\b\u0001\u0000<=\u0001\u0000>@\u0001\u0000LM\u0001\u0000"+
- "CD\u0002\u0000 ##\u0001\u0000&\'\u0002\u0000%%33\u0001\u00006;\u0213"+
- "\u0000\\\u0001\u0000\u0000\u0000\u0002_\u0001\u0000\u0000\u0000\u0004"+
- "n\u0001\u0000\u0000\u0000\u0006}\u0001\u0000\u0000\u0000\b\u007f\u0001"+
- "\u0000\u0000\u0000\n\u009e\u0001\u0000\u0000\u0000\f\u00b9\u0001\u0000"+
- "\u0000\u0000\u000e\u00c0\u0001\u0000\u0000\u0000\u0010\u00c6\u0001\u0000"+
- "\u0000\u0000\u0012\u00e7\u0001\u0000\u0000\u0000\u0014\u00e9\u0001\u0000"+
- "\u0000\u0000\u0016\u00ec\u0001\u0000\u0000\u0000\u0018\u00f9\u0001\u0000"+
- "\u0000\u0000\u001a\u00fb\u0001\u0000\u0000\u0000\u001c\u0107\u0001\u0000"+
- "\u0000\u0000\u001e\u0113\u0001\u0000\u0000\u0000 \u0116\u0001\u0000\u0000"+
- "\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0124\u0001\u0000\u0000\u0000"+
- "&\u012c\u0001\u0000\u0000\u0000(\u012e\u0001\u0000\u0000\u0000*\u0136"+
- "\u0001\u0000\u0000\u0000,\u0162\u0001\u0000\u0000\u0000.\u0164\u0001\u0000"+
- "\u0000\u00000\u0167\u0001\u0000\u0000\u00002\u0170\u0001\u0000\u0000\u0000"+
- "4\u018a\u0001\u0000\u0000\u00006\u018c\u0001\u0000\u0000\u00008\u0195"+
- "\u0001\u0000\u0000\u0000:\u019e\u0001\u0000\u0000\u0000<\u01a2\u0001\u0000"+
- "\u0000\u0000>\u01a8\u0001\u0000\u0000\u0000@\u01ac\u0001\u0000\u0000\u0000"+
- "B\u01af\u0001\u0000\u0000\u0000D\u01b7\u0001\u0000\u0000\u0000F\u01bb"+
- "\u0001\u0000\u0000\u0000H\u01bf\u0001\u0000\u0000\u0000J\u01c2\u0001\u0000"+
- "\u0000\u0000L\u01c7\u0001\u0000\u0000\u0000N\u01cb\u0001\u0000\u0000\u0000"+
- "P\u01cd\u0001\u0000\u0000\u0000R\u01cf\u0001\u0000\u0000\u0000T\u01d2"+
- "\u0001\u0000\u0000\u0000V\u01da\u0001\u0000\u0000\u0000X\u01dc\u0001\u0000"+
- "\u0000\u0000Z\u01f0\u0001\u0000\u0000\u0000\\]\u0003\u0002\u0001\u0000"+
- "]^\u0005\u0000\u0000\u0001^\u0001\u0001\u0000\u0000\u0000_`\u0006\u0001"+
- "\uffff\uffff\u0000`a\u0003\u0004\u0002\u0000ag\u0001\u0000\u0000\u0000"+
- "bc\n\u0001\u0000\u0000cd\u0005\u001a\u0000\u0000df\u0003\u0006\u0003\u0000"+
- "eb\u0001\u0000\u0000\u0000fi\u0001\u0000\u0000\u0000ge\u0001\u0000\u0000"+
- "\u0000gh\u0001\u0000\u0000\u0000h\u0003\u0001\u0000\u0000\u0000ig\u0001"+
- "\u0000\u0000\u0000jo\u0003R)\u0000ko\u0003\u001a\r\u0000lo\u0003\u0014"+
- "\n\u0000mo\u0003V+\u0000nj\u0001\u0000\u0000\u0000nk\u0001\u0000\u0000"+
- "\u0000nl\u0001\u0000\u0000\u0000nm\u0001\u0000\u0000\u0000o\u0005\u0001"+
- "\u0000\u0000\u0000p~\u0003\u001e\u000f\u0000q~\u0003\"\u0011\u0000r~\u0003"+
- ".\u0017\u0000s~\u00034\u001a\u0000t~\u00030\u0018\u0000u~\u0003 \u0010"+
- "\u0000v~\u0003\b\u0004\u0000w~\u00036\u001b\u0000x~\u00038\u001c\u0000"+
- "y~\u0003<\u001e\u0000z~\u0003>\u001f\u0000{~\u0003X,\u0000|~\u0003@ \u0000"+
- "}p\u0001\u0000\u0000\u0000}q\u0001\u0000\u0000\u0000}r\u0001\u0000\u0000"+
- "\u0000}s\u0001\u0000\u0000\u0000}t\u0001\u0000\u0000\u0000}u\u0001\u0000"+
- "\u0000\u0000}v\u0001\u0000\u0000\u0000}w\u0001\u0000\u0000\u0000}x\u0001"+
- "\u0000\u0000\u0000}y\u0001\u0000\u0000\u0000}z\u0001\u0000\u0000\u0000"+
- "}{\u0001\u0000\u0000\u0000}|\u0001\u0000\u0000\u0000~\u0007\u0001\u0000"+
- "\u0000\u0000\u007f\u0080\u0005\u0012\u0000\u0000\u0080\u0081\u0003\n\u0005"+
- "\u0000\u0081\t\u0001\u0000\u0000\u0000\u0082\u0083\u0006\u0005\uffff\uffff"+
- "\u0000\u0083\u0084\u0005,\u0000\u0000\u0084\u009f\u0003\n\u0005\u0007"+
- "\u0085\u009f\u0003\u000e\u0007\u0000\u0086\u009f\u0003\f\u0006\u0000\u0087"+
- "\u0089\u0003\u000e\u0007\u0000\u0088\u008a\u0005,\u0000\u0000\u0089\u0088"+
- "\u0001\u0000\u0000\u0000\u0089\u008a\u0001\u0000\u0000\u0000\u008a\u008b"+
- "\u0001\u0000\u0000\u0000\u008b\u008c\u0005)\u0000\u0000\u008c\u008d\u0005"+
- "(\u0000\u0000\u008d\u0092\u0003\u000e\u0007\u0000\u008e\u008f\u0005\""+
- "\u0000\u0000\u008f\u0091\u0003\u000e\u0007\u0000\u0090\u008e\u0001\u0000"+
- "\u0000\u0000\u0091\u0094\u0001\u0000\u0000\u0000\u0092\u0090\u0001\u0000"+
- "\u0000\u0000\u0092\u0093\u0001\u0000\u0000\u0000\u0093\u0095\u0001\u0000"+
- "\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0095\u0096\u00052\u0000"+
- "\u0000\u0096\u009f\u0001\u0000\u0000\u0000\u0097\u0098\u0003\u000e\u0007"+
- "\u0000\u0098\u009a\u0005*\u0000\u0000\u0099\u009b\u0005,\u0000\u0000\u009a"+
- "\u0099\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000\u009b"+
- "\u009c\u0001\u0000\u0000\u0000\u009c\u009d\u0005-\u0000\u0000\u009d\u009f"+
- "\u0001\u0000\u0000\u0000\u009e\u0082\u0001\u0000\u0000\u0000\u009e\u0085"+
- "\u0001\u0000\u0000\u0000\u009e\u0086\u0001\u0000\u0000\u0000\u009e\u0087"+
- "\u0001\u0000\u0000\u0000\u009e\u0097\u0001\u0000\u0000\u0000\u009f\u00a8"+
- "\u0001\u0000\u0000\u0000\u00a0\u00a1\n\u0004\u0000\u0000\u00a1\u00a2\u0005"+
- "\u001f\u0000\u0000\u00a2\u00a7\u0003\n\u0005\u0005\u00a3\u00a4\n\u0003"+
- "\u0000\u0000\u00a4\u00a5\u0005/\u0000\u0000\u00a5\u00a7\u0003\n\u0005"+
- "\u0004\u00a6\u00a0\u0001\u0000\u0000\u0000\u00a6\u00a3\u0001\u0000\u0000"+
- "\u0000\u00a7\u00aa\u0001\u0000\u0000\u0000\u00a8\u00a6\u0001\u0000\u0000"+
- "\u0000\u00a8\u00a9\u0001\u0000\u0000\u0000\u00a9\u000b\u0001\u0000\u0000"+
- "\u0000\u00aa\u00a8\u0001\u0000\u0000\u0000\u00ab\u00ad\u0003\u000e\u0007"+
- "\u0000\u00ac\u00ae\u0005,\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000\u0000"+
- "\u00ad\u00ae\u0001\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000"+
- "\u00af\u00b0\u0005+\u0000\u0000\u00b0\u00b1\u0003N\'\u0000\u00b1\u00ba"+
- "\u0001\u0000\u0000\u0000\u00b2\u00b4\u0003\u000e\u0007\u0000\u00b3\u00b5"+
- "\u0005,\u0000\u0000\u00b4\u00b3\u0001\u0000\u0000\u0000\u00b4\u00b5\u0001"+
- "\u0000\u0000\u0000\u00b5\u00b6\u0001\u0000\u0000\u0000\u00b6\u00b7\u0005"+
- "1\u0000\u0000\u00b7\u00b8\u0003N\'\u0000\u00b8\u00ba\u0001\u0000\u0000"+
- "\u0000\u00b9\u00ab\u0001\u0000\u0000\u0000\u00b9\u00b2\u0001\u0000\u0000"+
- "\u0000\u00ba\r\u0001\u0000\u0000\u0000\u00bb\u00c1\u0003\u0010\b\u0000"+
- "\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00be\u0003P(\u0000\u00be\u00bf"+
- "\u0003\u0010\b\u0000\u00bf\u00c1\u0001\u0000\u0000\u0000\u00c0\u00bb\u0001"+
- "\u0000\u0000\u0000\u00c0\u00bc\u0001\u0000\u0000\u0000\u00c1\u000f\u0001"+
- "\u0000\u0000\u0000\u00c2\u00c3\u0006\b\uffff\uffff\u0000\u00c3\u00c7\u0003"+
- "\u0012\t\u0000\u00c4\u00c5\u0007\u0000\u0000\u0000\u00c5\u00c7\u0003\u0010"+
- "\b\u0003\u00c6\u00c2\u0001\u0000\u0000\u0000\u00c6\u00c4\u0001\u0000\u0000"+
- "\u0000\u00c7\u00d0\u0001\u0000\u0000\u0000\u00c8\u00c9\n\u0002\u0000\u0000"+
- "\u00c9\u00ca\u0007\u0001\u0000\u0000\u00ca\u00cf\u0003\u0010\b\u0003\u00cb"+
- "\u00cc\n\u0001\u0000\u0000\u00cc\u00cd\u0007\u0000\u0000\u0000\u00cd\u00cf"+
- "\u0003\u0010\b\u0002\u00ce\u00c8\u0001\u0000\u0000\u0000\u00ce\u00cb\u0001"+
- "\u0000\u0000\u0000\u00cf\u00d2\u0001\u0000\u0000\u0000\u00d0\u00ce\u0001"+
- "\u0000\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1\u0011\u0001"+
- "\u0000\u0000\u0000\u00d2\u00d0\u0001\u0000\u0000\u0000\u00d3\u00e8\u0003"+
- ",\u0016\u0000\u00d4\u00e8\u0003(\u0014\u0000\u00d5\u00d6\u0005(\u0000"+
- "\u0000\u00d6\u00d7\u0003\n\u0005\u0000\u00d7\u00d8\u00052\u0000\u0000"+
- "\u00d8\u00e8\u0001\u0000\u0000\u0000\u00d9\u00da\u0003*\u0015\u0000\u00da"+
- "\u00e3\u0005(\u0000\u0000\u00db\u00e0\u0003\n\u0005\u0000\u00dc\u00dd"+
- "\u0005\"\u0000\u0000\u00dd\u00df\u0003\n\u0005\u0000\u00de\u00dc\u0001"+
- "\u0000\u0000\u0000\u00df\u00e2\u0001\u0000\u0000\u0000\u00e0\u00de\u0001"+
- "\u0000\u0000\u0000\u00e0\u00e1\u0001\u0000\u0000\u0000\u00e1\u00e4\u0001"+
- "\u0000\u0000\u0000\u00e2\u00e0\u0001\u0000\u0000\u0000\u00e3\u00db\u0001"+
- "\u0000\u0000\u0000\u00e3\u00e4\u0001\u0000\u0000\u0000\u00e4\u00e5\u0001"+
- "\u0000\u0000\u0000\u00e5\u00e6\u00052\u0000\u0000\u00e6\u00e8\u0001\u0000"+
- "\u0000\u0000\u00e7\u00d3\u0001\u0000\u0000\u0000\u00e7\u00d4\u0001\u0000"+
- "\u0000\u0000\u00e7\u00d5\u0001\u0000\u0000\u0000\u00e7\u00d9\u0001\u0000"+
- "\u0000\u0000\u00e8\u0013\u0001\u0000\u0000\u0000\u00e9\u00ea\u0005\u000e"+
- "\u0000\u0000\u00ea\u00eb\u0003\u0016\u000b\u0000\u00eb\u0015\u0001\u0000"+
- "\u0000\u0000\u00ec\u00f1\u0003\u0018\f\u0000\u00ed\u00ee\u0005\"\u0000"+
- "\u0000\u00ee\u00f0\u0003\u0018\f\u0000\u00ef\u00ed\u0001\u0000\u0000\u0000"+
- "\u00f0\u00f3\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+
- "\u00f1\u00f2\u0001\u0000\u0000\u0000\u00f2\u0017\u0001\u0000\u0000\u0000"+
- "\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f4\u00fa\u0003\n\u0005\u0000\u00f5"+
- "\u00f6\u0003(\u0014\u0000\u00f6\u00f7\u0005!\u0000\u0000\u00f7\u00f8\u0003"+
- "\n\u0005\u0000\u00f8\u00fa\u0001\u0000\u0000\u0000\u00f9\u00f4\u0001\u0000"+
- "\u0000\u0000\u00f9\u00f5\u0001\u0000\u0000\u0000\u00fa\u0019\u0001\u0000"+
- "\u0000\u0000\u00fb\u00fc\u0005\u0006\u0000\u0000\u00fc\u0101\u0003&\u0013"+
- "\u0000\u00fd\u00fe\u0005\"\u0000\u0000\u00fe\u0100\u0003&\u0013\u0000"+
- "\u00ff\u00fd\u0001\u0000\u0000\u0000\u0100\u0103\u0001\u0000\u0000\u0000"+
- "\u0101\u00ff\u0001\u0000\u0000\u0000\u0101\u0102\u0001\u0000\u0000\u0000"+
- "\u0102\u0105\u0001\u0000\u0000\u0000\u0103\u0101\u0001\u0000\u0000\u0000"+
- "\u0104\u0106\u0003\u001c\u000e\u0000\u0105\u0104\u0001\u0000\u0000\u0000"+
- "\u0105\u0106\u0001\u0000\u0000\u0000\u0106\u001b\u0001\u0000\u0000\u0000"+
- "\u0107\u0108\u0005A\u0000\u0000\u0108\u0109\u0005I\u0000\u0000\u0109\u010e"+
- "\u0003&\u0013\u0000\u010a\u010b\u0005\"\u0000\u0000\u010b\u010d\u0003"+
- "&\u0013\u0000\u010c\u010a\u0001\u0000\u0000\u0000\u010d\u0110\u0001\u0000"+
- "\u0000\u0000\u010e\u010c\u0001\u0000\u0000\u0000\u010e\u010f\u0001\u0000"+
- "\u0000\u0000\u010f\u0111\u0001\u0000\u0000\u0000\u0110\u010e\u0001\u0000"+
- "\u0000\u0000\u0111\u0112\u0005B\u0000\u0000\u0112\u001d\u0001\u0000\u0000"+
- "\u0000\u0113\u0114\u0005\u0004\u0000\u0000\u0114\u0115\u0003\u0016\u000b"+
- "\u0000\u0115\u001f\u0001\u0000\u0000\u0000\u0116\u0118\u0005\u0011\u0000"+
- "\u0000\u0117\u0119\u0003\u0016\u000b\u0000\u0118\u0117\u0001\u0000\u0000"+
- "\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000"+
- "\u0000\u011a\u011b\u0005\u001e\u0000\u0000\u011b\u011d\u0003$\u0012\u0000"+
- "\u011c\u011a\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000"+
- "\u011d!\u0001\u0000\u0000\u0000\u011e\u011f\u0005\b\u0000\u0000\u011f"+
- "\u0122\u0003\u0016\u000b\u0000\u0120\u0121\u0005\u001e\u0000\u0000\u0121"+
- "\u0123\u0003$\u0012\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0122\u0123"+
- "\u0001\u0000\u0000\u0000\u0123#\u0001\u0000\u0000\u0000\u0124\u0129\u0003"+
- "(\u0014\u0000\u0125\u0126\u0005\"\u0000\u0000\u0126\u0128\u0003(\u0014"+
- "\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u012b\u0001\u0000\u0000"+
- "\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u0129\u012a\u0001\u0000\u0000"+
- "\u0000\u012a%\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000"+
- "\u012c\u012d\u0007\u0002\u0000\u0000\u012d\'\u0001\u0000\u0000\u0000\u012e"+
- "\u0133\u0003*\u0015\u0000\u012f\u0130\u0005$\u0000\u0000\u0130\u0132\u0003"+
- "*\u0015\u0000\u0131\u012f\u0001\u0000\u0000\u0000\u0132\u0135\u0001\u0000"+
- "\u0000\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000"+
- "\u0000\u0000\u0134)\u0001\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000"+
- "\u0000\u0136\u0137\u0007\u0003\u0000\u0000\u0137+\u0001\u0000\u0000\u0000"+
- "\u0138\u0163\u0005-\u0000\u0000\u0139\u013a\u0003L&\u0000\u013a\u013b"+
- "\u0005C\u0000\u0000\u013b\u0163\u0001\u0000\u0000\u0000\u013c\u0163\u0003"+
- "J%\u0000\u013d\u0163\u0003L&\u0000\u013e\u0163\u0003F#\u0000\u013f\u0163"+
- "\u00050\u0000\u0000\u0140\u0163\u0003N\'\u0000\u0141\u0142\u0005A\u0000"+
- "\u0000\u0142\u0147\u0003H$\u0000\u0143\u0144\u0005\"\u0000\u0000\u0144"+
- "\u0146\u0003H$\u0000\u0145\u0143\u0001\u0000\u0000\u0000\u0146\u0149\u0001"+
- "\u0000\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0147\u0148\u0001"+
- "\u0000\u0000\u0000\u0148\u014a\u0001\u0000\u0000\u0000\u0149\u0147\u0001"+
- "\u0000\u0000\u0000\u014a\u014b\u0005B\u0000\u0000\u014b\u0163\u0001\u0000"+
- "\u0000\u0000\u014c\u014d\u0005A\u0000\u0000\u014d\u0152\u0003F#\u0000"+
- "\u014e\u014f\u0005\"\u0000\u0000\u014f\u0151\u0003F#\u0000\u0150\u014e"+
- "\u0001\u0000\u0000\u0000\u0151\u0154\u0001\u0000\u0000\u0000\u0152\u0150"+
- "\u0001\u0000\u0000\u0000\u0152\u0153\u0001\u0000\u0000\u0000\u0153\u0155"+
- "\u0001\u0000\u0000\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155\u0156"+
- "\u0005B\u0000\u0000\u0156\u0163\u0001\u0000\u0000\u0000\u0157\u0158\u0005"+
- "A\u0000\u0000\u0158\u015d\u0003N\'\u0000\u0159\u015a\u0005\"\u0000\u0000"+
- "\u015a\u015c\u0003N\'\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015c"+
- "\u015f\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015d"+
- "\u015e\u0001\u0000\u0000\u0000\u015e\u0160\u0001\u0000\u0000\u0000\u015f"+
- "\u015d\u0001\u0000\u0000\u0000\u0160\u0161\u0005B\u0000\u0000\u0161\u0163"+
- "\u0001\u0000\u0000\u0000\u0162\u0138\u0001\u0000\u0000\u0000\u0162\u0139"+
- "\u0001\u0000\u0000\u0000\u0162\u013c\u0001\u0000\u0000\u0000\u0162\u013d"+
- "\u0001\u0000\u0000\u0000\u0162\u013e\u0001\u0000\u0000\u0000\u0162\u013f"+
- "\u0001\u0000\u0000\u0000\u0162\u0140\u0001\u0000\u0000\u0000\u0162\u0141"+
- "\u0001\u0000\u0000\u0000\u0162\u014c\u0001\u0000\u0000\u0000\u0162\u0157"+
- "\u0001\u0000\u0000\u0000\u0163-\u0001\u0000\u0000\u0000\u0164\u0165\u0005"+
- "\n\u0000\u0000\u0165\u0166\u0005\u001c\u0000\u0000\u0166/\u0001\u0000"+
- "\u0000\u0000\u0167\u0168\u0005\u0010\u0000\u0000\u0168\u016d\u00032\u0019"+
- "\u0000\u0169\u016a\u0005\"\u0000\u0000\u016a\u016c\u00032\u0019\u0000"+
- "\u016b\u0169\u0001\u0000\u0000\u0000\u016c\u016f\u0001\u0000\u0000\u0000"+
- "\u016d\u016b\u0001\u0000\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000"+
- "\u016e1\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170"+
- "\u0172\u0003\n\u0005\u0000\u0171\u0173\u0007\u0004\u0000\u0000\u0172\u0171"+
- "\u0001\u0000\u0000\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0176"+
- "\u0001\u0000\u0000\u0000\u0174\u0175\u0005.\u0000\u0000\u0175\u0177\u0007"+
- "\u0005\u0000\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001"+
- "\u0000\u0000\u0000\u01773\u0001\u0000\u0000\u0000\u0178\u0179\u0005\t"+
- "\u0000\u0000\u0179\u017e\u0003&\u0013\u0000\u017a\u017b\u0005\"\u0000"+
- "\u0000\u017b\u017d\u0003&\u0013\u0000\u017c\u017a\u0001\u0000\u0000\u0000"+
- "\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000"+
- "\u017e\u017f\u0001\u0000\u0000\u0000\u017f\u018b\u0001\u0000\u0000\u0000"+
- "\u0180\u017e\u0001\u0000\u0000\u0000\u0181\u0182\u0005\f\u0000\u0000\u0182"+
- "\u0187\u0003&\u0013\u0000\u0183\u0184\u0005\"\u0000\u0000\u0184\u0186"+
- "\u0003&\u0013\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186\u0189\u0001"+
- "\u0000\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187\u0188\u0001"+
- "\u0000\u0000\u0000\u0188\u018b\u0001\u0000\u0000\u0000\u0189\u0187\u0001"+
- "\u0000\u0000\u0000\u018a\u0178\u0001\u0000\u0000\u0000\u018a\u0181\u0001"+
- "\u0000\u0000\u0000\u018b5\u0001\u0000\u0000\u0000\u018c\u018d\u0005\u0002"+
- "\u0000\u0000\u018d\u0192\u0003&\u0013\u0000\u018e\u018f\u0005\"\u0000"+
- "\u0000\u018f\u0191\u0003&\u0013\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+
- "\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001\u0000\u0000\u0000"+
- "\u0192\u0193\u0001\u0000\u0000\u0000\u01937\u0001\u0000\u0000\u0000\u0194"+
- "\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005\r\u0000\u0000\u0196\u019b"+
- "\u0003:\u001d\u0000\u0197\u0198\u0005\"\u0000\u0000\u0198\u019a\u0003"+
- ":\u001d\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019d\u0001\u0000"+
- "\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000"+
- "\u0000\u0000\u019c9\u0001\u0000\u0000\u0000\u019d\u019b\u0001\u0000\u0000"+
- "\u0000\u019e\u019f\u0003&\u0013\u0000\u019f\u01a0\u0005H\u0000\u0000\u01a0"+
- "\u01a1\u0003&\u0013\u0000\u01a1;\u0001\u0000\u0000\u0000\u01a2\u01a3\u0005"+
- "\u0001\u0000\u0000\u01a3\u01a4\u0003\u0012\t\u0000\u01a4\u01a6\u0003N"+
- "\'\u0000\u01a5\u01a7\u0003B!\u0000\u01a6\u01a5\u0001\u0000\u0000\u0000"+
- "\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7=\u0001\u0000\u0000\u0000\u01a8"+
- "\u01a9\u0005\u0007\u0000\u0000\u01a9\u01aa\u0003\u0012\t\u0000\u01aa\u01ab"+
- "\u0003N\'\u0000\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u000b"+
- "\u0000\u0000\u01ad\u01ae\u0003&\u0013\u0000\u01aeA\u0001\u0000\u0000\u0000"+
- "\u01af\u01b4\u0003D\"\u0000\u01b0\u01b1\u0005\"\u0000\u0000\u01b1\u01b3"+
- "\u0003D\"\u0000\u01b2\u01b0\u0001\u0000\u0000\u0000\u01b3\u01b6\u0001"+
- "\u0000\u0000\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b5\u0001"+
- "\u0000\u0000\u0000\u01b5C\u0001\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000"+
- "\u0000\u0000\u01b7\u01b8\u0003*\u0015\u0000\u01b8\u01b9\u0005!\u0000\u0000"+
- "\u01b9\u01ba\u0003,\u0016\u0000\u01baE\u0001\u0000\u0000\u0000\u01bb\u01bc"+
- "\u0007\u0006\u0000\u0000\u01bcG\u0001\u0000\u0000\u0000\u01bd\u01c0\u0003"+
- "J%\u0000\u01be\u01c0\u0003L&\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000"+
- "\u01bf\u01be\u0001\u0000\u0000\u0000\u01c0I\u0001\u0000\u0000\u0000\u01c1"+
- "\u01c3\u0007\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000\u0000\u01c2"+
- "\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4"+
- "\u01c5\u0005\u001d\u0000\u0000\u01c5K\u0001\u0000\u0000\u0000\u01c6\u01c8"+
- "\u0007\u0000\u0000\u0000\u01c7\u01c6\u0001\u0000\u0000\u0000\u01c7\u01c8"+
- "\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01ca"+
- "\u0005\u001c\u0000\u0000\u01caM\u0001\u0000\u0000\u0000\u01cb\u01cc\u0005"+
- "\u001b\u0000\u0000\u01ccO\u0001\u0000\u0000\u0000\u01cd\u01ce\u0007\u0007"+
- "\u0000\u0000\u01ceQ\u0001\u0000\u0000\u0000\u01cf\u01d0\u0005\u0005\u0000"+
- "\u0000\u01d0\u01d1\u0003T*\u0000\u01d1S\u0001\u0000\u0000\u0000\u01d2"+
- "\u01d3\u0005A\u0000\u0000\u01d3\u01d4\u0003\u0002\u0001\u0000\u01d4\u01d5"+
- "\u0005B\u0000\u0000\u01d5U\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\u000f"+
- "\u0000\u0000\u01d7\u01db\u00054\u0000\u0000\u01d8\u01d9\u0005\u000f\u0000"+
- "\u0000\u01d9\u01db\u00055\u0000\u0000\u01da\u01d6\u0001\u0000\u0000\u0000"+
- "\u01da\u01d8\u0001\u0000\u0000\u0000\u01dbW\u0001\u0000\u0000\u0000\u01dc"+
- "\u01dd\u0005\u0003\u0000\u0000\u01dd\u01e0\u0003&\u0013\u0000\u01de\u01df"+
- "\u0005J\u0000\u0000\u01df\u01e1\u0003&\u0013\u0000\u01e0\u01de\u0001\u0000"+
- "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01eb\u0001\u0000"+
- "\u0000\u0000\u01e2\u01e3\u0005K\u0000\u0000\u01e3\u01e8\u0003Z-\u0000"+
- "\u01e4\u01e5\u0005\"\u0000\u0000\u01e5\u01e7\u0003Z-\u0000\u01e6\u01e4"+
- "\u0001\u0000\u0000\u0000\u01e7\u01ea\u0001\u0000\u0000\u0000\u01e8\u01e6"+
- "\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9\u01ec"+
- "\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01e2"+
- "\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecY\u0001"+
- "\u0000\u0000\u0000\u01ed\u01ee\u0003&\u0013\u0000\u01ee\u01ef\u0005!\u0000"+
- "\u0000\u01ef\u01f1\u0001\u0000\u0000\u0000\u01f0\u01ed\u0001\u0000\u0000"+
- "\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000"+
- "\u0000\u01f2\u01f3\u0003&\u0013\u0000\u01f3[\u0001\u0000\u0000\u00003"+
- "gn}\u0089\u0092\u009a\u009e\u00a6\u00a8\u00ad\u00b4\u00b9\u00c0\u00c6"+
- "\u00ce\u00d0\u00e0\u00e3\u00e7\u00f1\u00f9\u0101\u0105\u010e\u0118\u011c"+
- "\u0122\u0129\u0133\u0147\u0152\u015d\u0162\u016d\u0172\u0176\u017e\u0187"+
- "\u018a\u0192\u019b\u01a6\u01b4\u01bf\u01c2\u01c7\u01da\u01e0\u01e8\u01eb"+
- "\u01f0";
+ "-\u0007-\u0002.\u0007.\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+
+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+
+ "h\b\u0001\n\u0001\f\u0001k\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+
+ "\u0001\u0002\u0003\u0002q\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003"+
+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+
+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0080\b\u0003"+
+ "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+
+ "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u008c\b\u0005"+
+ "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+
+ "\u0093\b\u0005\n\u0005\f\u0005\u0096\t\u0005\u0001\u0005\u0001\u0005\u0001"+
+ "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u009d\b\u0005\u0001\u0005\u0001"+
+ "\u0005\u0003\u0005\u00a1\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+
+ "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00a9\b\u0005\n\u0005\f\u0005"+
+ "\u00ac\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00b0\b\u0006\u0001"+
+ "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00b7"+
+ "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00bc\b\u0006"+
+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007"+
+ "\u00c3\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00c9\b\b\u0001"+
+ "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00d1\b\b\n\b\f\b\u00d4"+
+ "\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00dd"+
+ "\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u00e5\b\n"+
+ "\n\n\f\n\u00e8\t\n\u0003\n\u00ea\b\n\u0001\n\u0001\n\u0001\u000b\u0001"+
+ "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f\u00f4\b\f\n\f\f\f\u00f7"+
+ "\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003\r\u00fe\b\r\u0001\u000e"+
+ "\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e\u0104\b\u000e\n\u000e"+
+ "\f\u000e\u0107\t\u000e\u0001\u000e\u0003\u000e\u010a\b\u000e\u0001\u000f"+
+ "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0111\b\u000f"+
+ "\n\u000f\f\u000f\u0114\t\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001"+
+ "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0003\u0011\u011d\b\u0011\u0001"+
+ "\u0011\u0001\u0011\u0003\u0011\u0121\b\u0011\u0001\u0012\u0001\u0012\u0001"+
+ "\u0012\u0001\u0012\u0003\u0012\u0127\b\u0012\u0001\u0013\u0001\u0013\u0001"+
+ "\u0013\u0005\u0013\u012c\b\u0013\n\u0013\f\u0013\u012f\t\u0013\u0001\u0014"+
+ "\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u0136\b\u0015"+
+ "\n\u0015\f\u0015\u0139\t\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+
+ "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+
+ "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005"+
+ "\u0017\u014a\b\u0017\n\u0017\f\u0017\u014d\t\u0017\u0001\u0017\u0001\u0017"+
+ "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0155\b\u0017"+
+ "\n\u0017\f\u0017\u0158\t\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+
+ "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0160\b\u0017\n\u0017\f\u0017"+
+ "\u0163\t\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0167\b\u0017\u0001"+
+ "\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+
+ "\u0019\u0005\u0019\u0170\b\u0019\n\u0019\f\u0019\u0173\t\u0019\u0001\u001a"+
+ "\u0001\u001a\u0003\u001a\u0177\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+
+ "\u017b\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+
+ "\u0181\b\u001b\n\u001b\f\u001b\u0184\t\u001b\u0001\u001b\u0001\u001b\u0001"+
+ "\u001b\u0001\u001b\u0005\u001b\u018a\b\u001b\n\u001b\f\u001b\u018d\t\u001b"+
+ "\u0003\u001b\u018f\b\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+
+ "\u0005\u001c\u0195\b\u001c\n\u001c\f\u001c\u0198\t\u001c\u0001\u001d\u0001"+
+ "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u019e\b\u001d\n\u001d\f\u001d"+
+ "\u01a1\t\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f"+
+ "\u0001\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u01ab\b\u001f\u0001 "+
+ "\u0001 \u0001 \u0001 \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0005"+
+ "\"\u01b7\b\"\n\"\f\"\u01ba\t\"\u0001#\u0001#\u0001#\u0001#\u0001$\u0001"+
+ "$\u0001%\u0001%\u0003%\u01c4\b%\u0001&\u0003&\u01c7\b&\u0001&\u0001&\u0001"+
+ "\'\u0003\'\u01cc\b\'\u0001\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001"+
+ "*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"+
+ ",\u0003,\u01df\b,\u0001-\u0001-\u0001-\u0001-\u0003-\u01e5\b-\u0001-\u0001"+
+ "-\u0001-\u0001-\u0005-\u01eb\b-\n-\f-\u01ee\t-\u0003-\u01f0\b-\u0001."+
+ "\u0001.\u0001.\u0003.\u01f5\b.\u0001.\u0001.\u0001.\u0000\u0003\u0002"+
+ "\n\u0010/\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016"+
+ "\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\\u0000\b\u0001"+
+ "\u0000<=\u0001\u0000>@\u0001\u0000LM\u0001\u0000CD\u0002\u0000 ##\u0001"+
+ "\u0000&\'\u0002\u0000%%33\u0001\u00006;\u0217\u0000^\u0001\u0000\u0000"+
+ "\u0000\u0002a\u0001\u0000\u0000\u0000\u0004p\u0001\u0000\u0000\u0000\u0006"+
+ "\u007f\u0001\u0000\u0000\u0000\b\u0081\u0001\u0000\u0000\u0000\n\u00a0"+
+ "\u0001\u0000\u0000\u0000\f\u00bb\u0001\u0000\u0000\u0000\u000e\u00c2\u0001"+
+ "\u0000\u0000\u0000\u0010\u00c8\u0001\u0000\u0000\u0000\u0012\u00dc\u0001"+
+ "\u0000\u0000\u0000\u0014\u00de\u0001\u0000\u0000\u0000\u0016\u00ed\u0001"+
+ "\u0000\u0000\u0000\u0018\u00f0\u0001\u0000\u0000\u0000\u001a\u00fd\u0001"+
+ "\u0000\u0000\u0000\u001c\u00ff\u0001\u0000\u0000\u0000\u001e\u010b\u0001"+
+ "\u0000\u0000\u0000 \u0117\u0001\u0000\u0000\u0000\"\u011a\u0001\u0000"+
+ "\u0000\u0000$\u0122\u0001\u0000\u0000\u0000&\u0128\u0001\u0000\u0000\u0000"+
+ "(\u0130\u0001\u0000\u0000\u0000*\u0132\u0001\u0000\u0000\u0000,\u013a"+
+ "\u0001\u0000\u0000\u0000.\u0166\u0001\u0000\u0000\u00000\u0168\u0001\u0000"+
+ "\u0000\u00002\u016b\u0001\u0000\u0000\u00004\u0174\u0001\u0000\u0000\u0000"+
+ "6\u018e\u0001\u0000\u0000\u00008\u0190\u0001\u0000\u0000\u0000:\u0199"+
+ "\u0001\u0000\u0000\u0000<\u01a2\u0001\u0000\u0000\u0000>\u01a6\u0001\u0000"+
+ "\u0000\u0000@\u01ac\u0001\u0000\u0000\u0000B\u01b0\u0001\u0000\u0000\u0000"+
+ "D\u01b3\u0001\u0000\u0000\u0000F\u01bb\u0001\u0000\u0000\u0000H\u01bf"+
+ "\u0001\u0000\u0000\u0000J\u01c3\u0001\u0000\u0000\u0000L\u01c6\u0001\u0000"+
+ "\u0000\u0000N\u01cb\u0001\u0000\u0000\u0000P\u01cf\u0001\u0000\u0000\u0000"+
+ "R\u01d1\u0001\u0000\u0000\u0000T\u01d3\u0001\u0000\u0000\u0000V\u01d6"+
+ "\u0001\u0000\u0000\u0000X\u01de\u0001\u0000\u0000\u0000Z\u01e0\u0001\u0000"+
+ "\u0000\u0000\\\u01f4\u0001\u0000\u0000\u0000^_\u0003\u0002\u0001\u0000"+
+ "_`\u0005\u0000\u0000\u0001`\u0001\u0001\u0000\u0000\u0000ab\u0006\u0001"+
+ "\uffff\uffff\u0000bc\u0003\u0004\u0002\u0000ci\u0001\u0000\u0000\u0000"+
+ "de\n\u0001\u0000\u0000ef\u0005\u001a\u0000\u0000fh\u0003\u0006\u0003\u0000"+
+ "gd\u0001\u0000\u0000\u0000hk\u0001\u0000\u0000\u0000ig\u0001\u0000\u0000"+
+ "\u0000ij\u0001\u0000\u0000\u0000j\u0003\u0001\u0000\u0000\u0000ki\u0001"+
+ "\u0000\u0000\u0000lq\u0003T*\u0000mq\u0003\u001c\u000e\u0000nq\u0003\u0016"+
+ "\u000b\u0000oq\u0003X,\u0000pl\u0001\u0000\u0000\u0000pm\u0001\u0000\u0000"+
+ "\u0000pn\u0001\u0000\u0000\u0000po\u0001\u0000\u0000\u0000q\u0005\u0001"+
+ "\u0000\u0000\u0000r\u0080\u0003 \u0010\u0000s\u0080\u0003$\u0012\u0000"+
+ "t\u0080\u00030\u0018\u0000u\u0080\u00036\u001b\u0000v\u0080\u00032\u0019"+
+ "\u0000w\u0080\u0003\"\u0011\u0000x\u0080\u0003\b\u0004\u0000y\u0080\u0003"+
+ "8\u001c\u0000z\u0080\u0003:\u001d\u0000{\u0080\u0003>\u001f\u0000|\u0080"+
+ "\u0003@ \u0000}\u0080\u0003Z-\u0000~\u0080\u0003B!\u0000\u007fr\u0001"+
+ "\u0000\u0000\u0000\u007fs\u0001\u0000\u0000\u0000\u007ft\u0001\u0000\u0000"+
+ "\u0000\u007fu\u0001\u0000\u0000\u0000\u007fv\u0001\u0000\u0000\u0000\u007f"+
+ "w\u0001\u0000\u0000\u0000\u007fx\u0001\u0000\u0000\u0000\u007fy\u0001"+
+ "\u0000\u0000\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+
+ "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+
+ "~\u0001\u0000\u0000\u0000\u0080\u0007\u0001\u0000\u0000\u0000\u0081\u0082"+
+ "\u0005\u0012\u0000\u0000\u0082\u0083\u0003\n\u0005\u0000\u0083\t\u0001"+
+ "\u0000\u0000\u0000\u0084\u0085\u0006\u0005\uffff\uffff\u0000\u0085\u0086"+
+ "\u0005,\u0000\u0000\u0086\u00a1\u0003\n\u0005\u0007\u0087\u00a1\u0003"+
+ "\u000e\u0007\u0000\u0088\u00a1\u0003\f\u0006\u0000\u0089\u008b\u0003\u000e"+
+ "\u0007\u0000\u008a\u008c\u0005,\u0000\u0000\u008b\u008a\u0001\u0000\u0000"+
+ "\u0000\u008b\u008c\u0001\u0000\u0000\u0000\u008c\u008d\u0001\u0000\u0000"+
+ "\u0000\u008d\u008e\u0005)\u0000\u0000\u008e\u008f\u0005(\u0000\u0000\u008f"+
+ "\u0094\u0003\u000e\u0007\u0000\u0090\u0091\u0005\"\u0000\u0000\u0091\u0093"+
+ "\u0003\u000e\u0007\u0000\u0092\u0090\u0001\u0000\u0000\u0000\u0093\u0096"+
+ "\u0001\u0000\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0094\u0095"+
+ "\u0001\u0000\u0000\u0000\u0095\u0097\u0001\u0000\u0000\u0000\u0096\u0094"+
+ "\u0001\u0000\u0000\u0000\u0097\u0098\u00052\u0000\u0000\u0098\u00a1\u0001"+
+ "\u0000\u0000\u0000\u0099\u009a\u0003\u000e\u0007\u0000\u009a\u009c\u0005"+
+ "*\u0000\u0000\u009b\u009d\u0005,\u0000\u0000\u009c\u009b\u0001\u0000\u0000"+
+ "\u0000\u009c\u009d\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000"+
+ "\u0000\u009e\u009f\u0005-\u0000\u0000\u009f\u00a1\u0001\u0000\u0000\u0000"+
+ "\u00a0\u0084\u0001\u0000\u0000\u0000\u00a0\u0087\u0001\u0000\u0000\u0000"+
+ "\u00a0\u0088\u0001\u0000\u0000\u0000\u00a0\u0089\u0001\u0000\u0000\u0000"+
+ "\u00a0\u0099\u0001\u0000\u0000\u0000\u00a1\u00aa\u0001\u0000\u0000\u0000"+
+ "\u00a2\u00a3\n\u0004\u0000\u0000\u00a3\u00a4\u0005\u001f\u0000\u0000\u00a4"+
+ "\u00a9\u0003\n\u0005\u0005\u00a5\u00a6\n\u0003\u0000\u0000\u00a6\u00a7"+
+ "\u0005/\u0000\u0000\u00a7\u00a9\u0003\n\u0005\u0004\u00a8\u00a2\u0001"+
+ "\u0000\u0000\u0000\u00a8\u00a5\u0001\u0000\u0000\u0000\u00a9\u00ac\u0001"+
+ "\u0000\u0000\u0000\u00aa\u00a8\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001"+
+ "\u0000\u0000\u0000\u00ab\u000b\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001"+
+ "\u0000\u0000\u0000\u00ad\u00af\u0003\u000e\u0007\u0000\u00ae\u00b0\u0005"+
+ ",\u0000\u0000\u00af\u00ae\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000"+
+ "\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1\u00b2\u0005+\u0000"+
+ "\u0000\u00b2\u00b3\u0003P(\u0000\u00b3\u00bc\u0001\u0000\u0000\u0000\u00b4"+
+ "\u00b6\u0003\u000e\u0007\u0000\u00b5\u00b7\u0005,\u0000\u0000\u00b6\u00b5"+
+ "\u0001\u0000\u0000\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8"+
+ "\u0001\u0000\u0000\u0000\u00b8\u00b9\u00051\u0000\u0000\u00b9\u00ba\u0003"+
+ "P(\u0000\u00ba\u00bc\u0001\u0000\u0000\u0000\u00bb\u00ad\u0001\u0000\u0000"+
+ "\u0000\u00bb\u00b4\u0001\u0000\u0000\u0000\u00bc\r\u0001\u0000\u0000\u0000"+
+ "\u00bd\u00c3\u0003\u0010\b\u0000\u00be\u00bf\u0003\u0010\b\u0000\u00bf"+
+ "\u00c0\u0003R)\u0000\u00c0\u00c1\u0003\u0010\b\u0000\u00c1\u00c3\u0001"+
+ "\u0000\u0000\u0000\u00c2\u00bd\u0001\u0000\u0000\u0000\u00c2\u00be\u0001"+
+ "\u0000\u0000\u0000\u00c3\u000f\u0001\u0000\u0000\u0000\u00c4\u00c5\u0006"+
+ "\b\uffff\uffff\u0000\u00c5\u00c9\u0003\u0012\t\u0000\u00c6\u00c7\u0007"+
+ "\u0000\u0000\u0000\u00c7\u00c9\u0003\u0010\b\u0003\u00c8\u00c4\u0001\u0000"+
+ "\u0000\u0000\u00c8\u00c6\u0001\u0000\u0000\u0000\u00c9\u00d2\u0001\u0000"+
+ "\u0000\u0000\u00ca\u00cb\n\u0002\u0000\u0000\u00cb\u00cc\u0007\u0001\u0000"+
+ "\u0000\u00cc\u00d1\u0003\u0010\b\u0003\u00cd\u00ce\n\u0001\u0000\u0000"+
+ "\u00ce\u00cf\u0007\u0000\u0000\u0000\u00cf\u00d1\u0003\u0010\b\u0002\u00d0"+
+ "\u00ca\u0001\u0000\u0000\u0000\u00d0\u00cd\u0001\u0000\u0000\u0000\u00d1"+
+ "\u00d4\u0001\u0000\u0000\u0000\u00d2\u00d0\u0001\u0000\u0000\u0000\u00d2"+
+ "\u00d3\u0001\u0000\u0000\u0000\u00d3\u0011\u0001\u0000\u0000\u0000\u00d4"+
+ "\u00d2\u0001\u0000\u0000\u0000\u00d5\u00dd\u0003.\u0017\u0000\u00d6\u00dd"+
+ "\u0003*\u0015\u0000\u00d7\u00dd\u0003\u0014\n\u0000\u00d8\u00d9\u0005"+
+ "(\u0000\u0000\u00d9\u00da\u0003\n\u0005\u0000\u00da\u00db\u00052\u0000"+
+ "\u0000\u00db\u00dd\u0001\u0000\u0000\u0000\u00dc\u00d5\u0001\u0000\u0000"+
+ "\u0000\u00dc\u00d6\u0001\u0000\u0000\u0000\u00dc\u00d7\u0001\u0000\u0000"+
+ "\u0000\u00dc\u00d8\u0001\u0000\u0000\u0000\u00dd\u0013\u0001\u0000\u0000"+
+ "\u0000\u00de\u00df\u0003,\u0016\u0000\u00df\u00e9\u0005(\u0000\u0000\u00e0"+
+ "\u00ea\u0005>\u0000\u0000\u00e1\u00e6\u0003\n\u0005\u0000\u00e2\u00e3"+
+ "\u0005\"\u0000\u0000\u00e3\u00e5\u0003\n\u0005\u0000\u00e4\u00e2\u0001"+
+ "\u0000\u0000\u0000\u00e5\u00e8\u0001\u0000\u0000\u0000\u00e6\u00e4\u0001"+
+ "\u0000\u0000\u0000\u00e6\u00e7\u0001\u0000\u0000\u0000\u00e7\u00ea\u0001"+
+ "\u0000\u0000\u0000\u00e8\u00e6\u0001\u0000\u0000\u0000\u00e9\u00e0\u0001"+
+ "\u0000\u0000\u0000\u00e9\u00e1\u0001\u0000\u0000\u0000\u00e9\u00ea\u0001"+
+ "\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb\u00ec\u0005"+
+ "2\u0000\u0000\u00ec\u0015\u0001\u0000\u0000\u0000\u00ed\u00ee\u0005\u000e"+
+ "\u0000\u0000\u00ee\u00ef\u0003\u0018\f\u0000\u00ef\u0017\u0001\u0000\u0000"+
+ "\u0000\u00f0\u00f5\u0003\u001a\r\u0000\u00f1\u00f2\u0005\"\u0000\u0000"+
+ "\u00f2\u00f4\u0003\u001a\r\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f4"+
+ "\u00f7\u0001\u0000\u0000\u0000\u00f5\u00f3\u0001\u0000\u0000\u0000\u00f5"+
+ "\u00f6\u0001\u0000\u0000\u0000\u00f6\u0019\u0001\u0000\u0000\u0000\u00f7"+
+ "\u00f5\u0001\u0000\u0000\u0000\u00f8\u00fe\u0003\n\u0005\u0000\u00f9\u00fa"+
+ "\u0003*\u0015\u0000\u00fa\u00fb\u0005!\u0000\u0000\u00fb\u00fc\u0003\n"+
+ "\u0005\u0000\u00fc\u00fe\u0001\u0000\u0000\u0000\u00fd\u00f8\u0001\u0000"+
+ "\u0000\u0000\u00fd\u00f9\u0001\u0000\u0000\u0000\u00fe\u001b\u0001\u0000"+
+ "\u0000\u0000\u00ff\u0100\u0005\u0006\u0000\u0000\u0100\u0105\u0003(\u0014"+
+ "\u0000\u0101\u0102\u0005\"\u0000\u0000\u0102\u0104\u0003(\u0014\u0000"+
+ "\u0103\u0101\u0001\u0000\u0000\u0000\u0104\u0107\u0001\u0000\u0000\u0000"+
+ "\u0105\u0103\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000\u0000\u0000"+
+ "\u0106\u0109\u0001\u0000\u0000\u0000\u0107\u0105\u0001\u0000\u0000\u0000"+
+ "\u0108\u010a\u0003\u001e\u000f\u0000\u0109\u0108\u0001\u0000\u0000\u0000"+
+ "\u0109\u010a\u0001\u0000\u0000\u0000\u010a\u001d\u0001\u0000\u0000\u0000"+
+ "\u010b\u010c\u0005A\u0000\u0000\u010c\u010d\u0005I\u0000\u0000\u010d\u0112"+
+ "\u0003(\u0014\u0000\u010e\u010f\u0005\"\u0000\u0000\u010f\u0111\u0003"+
+ "(\u0014\u0000\u0110\u010e\u0001\u0000\u0000\u0000\u0111\u0114\u0001\u0000"+
+ "\u0000\u0000\u0112\u0110\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000"+
+ "\u0000\u0000\u0113\u0115\u0001\u0000\u0000\u0000\u0114\u0112\u0001\u0000"+
+ "\u0000\u0000\u0115\u0116\u0005B\u0000\u0000\u0116\u001f\u0001\u0000\u0000"+
+ "\u0000\u0117\u0118\u0005\u0004\u0000\u0000\u0118\u0119\u0003\u0018\f\u0000"+
+ "\u0119!\u0001\u0000\u0000\u0000\u011a\u011c\u0005\u0011\u0000\u0000\u011b"+
+ "\u011d\u0003\u0018\f\u0000\u011c\u011b\u0001\u0000\u0000\u0000\u011c\u011d"+
+ "\u0001\u0000\u0000\u0000\u011d\u0120\u0001\u0000\u0000\u0000\u011e\u011f"+
+ "\u0005\u001e\u0000\u0000\u011f\u0121\u0003&\u0013\u0000\u0120\u011e\u0001"+
+ "\u0000\u0000\u0000\u0120\u0121\u0001\u0000\u0000\u0000\u0121#\u0001\u0000"+
+ "\u0000\u0000\u0122\u0123\u0005\b\u0000\u0000\u0123\u0126\u0003\u0018\f"+
+ "\u0000\u0124\u0125\u0005\u001e\u0000\u0000\u0125\u0127\u0003&\u0013\u0000"+
+ "\u0126\u0124\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000"+
+ "\u0127%\u0001\u0000\u0000\u0000\u0128\u012d\u0003*\u0015\u0000\u0129\u012a"+
+ "\u0005\"\u0000\u0000\u012a\u012c\u0003*\u0015\u0000\u012b\u0129\u0001"+
+ "\u0000\u0000\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001"+
+ "\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\'\u0001\u0000"+
+ "\u0000\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0007\u0002"+
+ "\u0000\u0000\u0131)\u0001\u0000\u0000\u0000\u0132\u0137\u0003,\u0016\u0000"+
+ "\u0133\u0134\u0005$\u0000\u0000\u0134\u0136\u0003,\u0016\u0000\u0135\u0133"+
+ "\u0001\u0000\u0000\u0000\u0136\u0139\u0001\u0000\u0000\u0000\u0137\u0135"+
+ "\u0001\u0000\u0000\u0000\u0137\u0138\u0001\u0000\u0000\u0000\u0138+\u0001"+
+ "\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013b\u0007"+
+ "\u0003\u0000\u0000\u013b-\u0001\u0000\u0000\u0000\u013c\u0167\u0005-\u0000"+
+ "\u0000\u013d\u013e\u0003N\'\u0000\u013e\u013f\u0005C\u0000\u0000\u013f"+
+ "\u0167\u0001\u0000\u0000\u0000\u0140\u0167\u0003L&\u0000\u0141\u0167\u0003"+
+ "N\'\u0000\u0142\u0167\u0003H$\u0000\u0143\u0167\u00050\u0000\u0000\u0144"+
+ "\u0167\u0003P(\u0000\u0145\u0146\u0005A\u0000\u0000\u0146\u014b\u0003"+
+ "J%\u0000\u0147\u0148\u0005\"\u0000\u0000\u0148\u014a\u0003J%\u0000\u0149"+
+ "\u0147\u0001\u0000\u0000\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b"+
+ "\u0149\u0001\u0000\u0000\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c"+
+ "\u014e\u0001\u0000\u0000\u0000\u014d\u014b\u0001\u0000\u0000\u0000\u014e"+
+ "\u014f\u0005B\u0000\u0000\u014f\u0167\u0001\u0000\u0000\u0000\u0150\u0151"+
+ "\u0005A\u0000\u0000\u0151\u0156\u0003H$\u0000\u0152\u0153\u0005\"\u0000"+
+ "\u0000\u0153\u0155\u0003H$\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+
+ "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+
+ "\u0157\u0001\u0000\u0000\u0000\u0157\u0159\u0001\u0000\u0000\u0000\u0158"+
+ "\u0156\u0001\u0000\u0000\u0000\u0159\u015a\u0005B\u0000\u0000\u015a\u0167"+
+ "\u0001\u0000\u0000\u0000\u015b\u015c\u0005A\u0000\u0000\u015c\u0161\u0003"+
+ "P(\u0000\u015d\u015e\u0005\"\u0000\u0000\u015e\u0160\u0003P(\u0000\u015f"+
+ "\u015d\u0001\u0000\u0000\u0000\u0160\u0163\u0001\u0000\u0000\u0000\u0161"+
+ "\u015f\u0001\u0000\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162"+
+ "\u0164\u0001\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+
+ "\u0165\u0005B\u0000\u0000\u0165\u0167\u0001\u0000\u0000\u0000\u0166\u013c"+
+ "\u0001\u0000\u0000\u0000\u0166\u013d\u0001\u0000\u0000\u0000\u0166\u0140"+
+ "\u0001\u0000\u0000\u0000\u0166\u0141\u0001\u0000\u0000\u0000\u0166\u0142"+
+ "\u0001\u0000\u0000\u0000\u0166\u0143\u0001\u0000\u0000\u0000\u0166\u0144"+
+ "\u0001\u0000\u0000\u0000\u0166\u0145\u0001\u0000\u0000\u0000\u0166\u0150"+
+ "\u0001\u0000\u0000\u0000\u0166\u015b\u0001\u0000\u0000\u0000\u0167/\u0001"+
+ "\u0000\u0000\u0000\u0168\u0169\u0005\n\u0000\u0000\u0169\u016a\u0005\u001c"+
+ "\u0000\u0000\u016a1\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u0010\u0000"+
+ "\u0000\u016c\u0171\u00034\u001a\u0000\u016d\u016e\u0005\"\u0000\u0000"+
+ "\u016e\u0170\u00034\u001a\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170"+
+ "\u0173\u0001\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171"+
+ "\u0172\u0001\u0000\u0000\u0000\u01723\u0001\u0000\u0000\u0000\u0173\u0171"+
+ "\u0001\u0000\u0000\u0000\u0174\u0176\u0003\n\u0005\u0000\u0175\u0177\u0007"+
+ "\u0004\u0000\u0000\u0176\u0175\u0001\u0000\u0000\u0000\u0176\u0177\u0001"+
+ "\u0000\u0000\u0000\u0177\u017a\u0001\u0000\u0000\u0000\u0178\u0179\u0005"+
+ ".\u0000\u0000\u0179\u017b\u0007\u0005\u0000\u0000\u017a\u0178\u0001\u0000"+
+ "\u0000\u0000\u017a\u017b\u0001\u0000\u0000\u0000\u017b5\u0001\u0000\u0000"+
+ "\u0000\u017c\u017d\u0005\t\u0000\u0000\u017d\u0182\u0003(\u0014\u0000"+
+ "\u017e\u017f\u0005\"\u0000\u0000\u017f\u0181\u0003(\u0014\u0000\u0180"+
+ "\u017e\u0001\u0000\u0000\u0000\u0181\u0184\u0001\u0000\u0000\u0000\u0182"+
+ "\u0180\u0001\u0000\u0000\u0000\u0182\u0183\u0001\u0000\u0000\u0000\u0183"+
+ "\u018f\u0001\u0000\u0000\u0000\u0184\u0182\u0001\u0000\u0000\u0000\u0185"+
+ "\u0186\u0005\f\u0000\u0000\u0186\u018b\u0003(\u0014\u0000\u0187\u0188"+
+ "\u0005\"\u0000\u0000\u0188\u018a\u0003(\u0014\u0000\u0189\u0187\u0001"+
+ "\u0000\u0000\u0000\u018a\u018d\u0001\u0000\u0000\u0000\u018b\u0189\u0001"+
+ "\u0000\u0000\u0000\u018b\u018c\u0001\u0000\u0000\u0000\u018c\u018f\u0001"+
+ "\u0000\u0000\u0000\u018d\u018b\u0001\u0000\u0000\u0000\u018e\u017c\u0001"+
+ "\u0000\u0000\u0000\u018e\u0185\u0001\u0000\u0000\u0000\u018f7\u0001\u0000"+
+ "\u0000\u0000\u0190\u0191\u0005\u0002\u0000\u0000\u0191\u0196\u0003(\u0014"+
+ "\u0000\u0192\u0193\u0005\"\u0000\u0000\u0193\u0195\u0003(\u0014\u0000"+
+ "\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0198\u0001\u0000\u0000\u0000"+
+ "\u0196\u0194\u0001\u0000\u0000\u0000\u0196\u0197\u0001\u0000\u0000\u0000"+
+ "\u01979\u0001\u0000\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199"+
+ "\u019a\u0005\r\u0000\u0000\u019a\u019f\u0003<\u001e\u0000\u019b\u019c"+
+ "\u0005\"\u0000\u0000\u019c\u019e\u0003<\u001e\u0000\u019d\u019b\u0001"+
+ "\u0000\u0000\u0000\u019e\u01a1\u0001\u0000\u0000\u0000\u019f\u019d\u0001"+
+ "\u0000\u0000\u0000\u019f\u01a0\u0001\u0000\u0000\u0000\u01a0;\u0001\u0000"+
+ "\u0000\u0000\u01a1\u019f\u0001\u0000\u0000\u0000\u01a2\u01a3\u0003(\u0014"+
+ "\u0000\u01a3\u01a4\u0005H\u0000\u0000\u01a4\u01a5\u0003(\u0014\u0000\u01a5"+
+ "=\u0001\u0000\u0000\u0000\u01a6\u01a7\u0005\u0001\u0000\u0000\u01a7\u01a8"+
+ "\u0003\u0012\t\u0000\u01a8\u01aa\u0003P(\u0000\u01a9\u01ab\u0003D\"\u0000"+
+ "\u01aa\u01a9\u0001\u0000\u0000\u0000\u01aa\u01ab\u0001\u0000\u0000\u0000"+
+ "\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u0007\u0000\u0000\u01ad"+
+ "\u01ae\u0003\u0012\t\u0000\u01ae\u01af\u0003P(\u0000\u01afA\u0001\u0000"+
+ "\u0000\u0000\u01b0\u01b1\u0005\u000b\u0000\u0000\u01b1\u01b2\u0003(\u0014"+
+ "\u0000\u01b2C\u0001\u0000\u0000\u0000\u01b3\u01b8\u0003F#\u0000\u01b4"+
+ "\u01b5\u0005\"\u0000\u0000\u01b5\u01b7\u0003F#\u0000\u01b6\u01b4\u0001"+
+ "\u0000\u0000\u0000\u01b7\u01ba\u0001\u0000\u0000\u0000\u01b8\u01b6\u0001"+
+ "\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9E\u0001\u0000"+
+ "\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01bc\u0003,\u0016"+
+ "\u0000\u01bc\u01bd\u0005!\u0000\u0000\u01bd\u01be\u0003.\u0017\u0000\u01be"+
+ "G\u0001\u0000\u0000\u0000\u01bf\u01c0\u0007\u0006\u0000\u0000\u01c0I\u0001"+
+ "\u0000\u0000\u0000\u01c1\u01c4\u0003L&\u0000\u01c2\u01c4\u0003N\'\u0000"+
+ "\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c3\u01c2\u0001\u0000\u0000\u0000"+
+ "\u01c4K\u0001\u0000\u0000\u0000\u01c5\u01c7\u0007\u0000\u0000\u0000\u01c6"+
+ "\u01c5\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7"+
+ "\u01c8\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005\u001d\u0000\u0000\u01c9"+
+ "M\u0001\u0000\u0000\u0000\u01ca\u01cc\u0007\u0000\u0000\u0000\u01cb\u01ca"+
+ "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01cd"+
+ "\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005\u001c\u0000\u0000\u01ceO\u0001"+
+ "\u0000\u0000\u0000\u01cf\u01d0\u0005\u001b\u0000\u0000\u01d0Q\u0001\u0000"+
+ "\u0000\u0000\u01d1\u01d2\u0007\u0007\u0000\u0000\u01d2S\u0001\u0000\u0000"+
+ "\u0000\u01d3\u01d4\u0005\u0005\u0000\u0000\u01d4\u01d5\u0003V+\u0000\u01d5"+
+ "U\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005A\u0000\u0000\u01d7\u01d8\u0003"+
+ "\u0002\u0001\u0000\u01d8\u01d9\u0005B\u0000\u0000\u01d9W\u0001\u0000\u0000"+
+ "\u0000\u01da\u01db\u0005\u000f\u0000\u0000\u01db\u01df\u00054\u0000\u0000"+
+ "\u01dc\u01dd\u0005\u000f\u0000\u0000\u01dd\u01df\u00055\u0000\u0000\u01de"+
+ "\u01da\u0001\u0000\u0000\u0000\u01de\u01dc\u0001\u0000\u0000\u0000\u01df"+
+ "Y\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005\u0003\u0000\u0000\u01e1\u01e4"+
+ "\u0003(\u0014\u0000\u01e2\u01e3\u0005J\u0000\u0000\u01e3\u01e5\u0003("+
+ "\u0014\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000"+
+ "\u0000\u0000\u01e5\u01ef\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005K\u0000"+
+ "\u0000\u01e7\u01ec\u0003\\.\u0000\u01e8\u01e9\u0005\"\u0000\u0000\u01e9"+
+ "\u01eb\u0003\\.\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01ee"+
+ "\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed"+
+ "\u0001\u0000\u0000\u0000\u01ed\u01f0\u0001\u0000\u0000\u0000\u01ee\u01ec"+
+ "\u0001\u0000\u0000\u0000\u01ef\u01e6\u0001\u0000\u0000\u0000\u01ef\u01f0"+
+ "\u0001\u0000\u0000\u0000\u01f0[\u0001\u0000\u0000\u0000\u01f1\u01f2\u0003"+
+ "(\u0014\u0000\u01f2\u01f3\u0005!\u0000\u0000\u01f3\u01f5\u0001\u0000\u0000"+
+ "\u0000\u01f4\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000"+
+ "\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0003(\u0014\u0000"+
+ "\u01f7]\u0001\u0000\u0000\u00003ip\u007f\u008b\u0094\u009c\u00a0\u00a8"+
+ "\u00aa\u00af\u00b6\u00bb\u00c2\u00c8\u00d0\u00d2\u00dc\u00e6\u00e9\u00f5"+
+ "\u00fd\u0105\u0109\u0112\u011c\u0120\u0126\u012d\u0137\u014b\u0156\u0161"+
+ "\u0166\u0171\u0176\u017a\u0182\u018b\u018e\u0196\u019f\u01aa\u01b8\u01c3"+
+ "\u01c6\u01cb\u01de\u01e4\u01ec\u01ef\u01f4";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java
index ceef1b4e681a7..3137eff0b6550 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java
@@ -252,6 +252,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener {
* The default implementation does nothing.
*/
@Override public void exitDereference(EsqlBaseParser.DereferenceContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterFunction(EsqlBaseParser.FunctionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitFunction(EsqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
index 48f5b33fcfec1..d7b2f359e3c83 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java
@@ -152,6 +152,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitDereference(EsqlBaseParser.DereferenceContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitFunction(EsqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
index 04f0d6da3dbe4..dd6cdaacddbef 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
@@ -237,6 +237,18 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitDereference(EsqlBaseParser.DereferenceContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code function}
+ * labeled alternative in {@link EsqlBaseParser#primaryExpression}.
+ * @param ctx the parse tree
+ */
+ void enterFunction(EsqlBaseParser.FunctionContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code function}
+ * labeled alternative in {@link EsqlBaseParser#primaryExpression}.
+ * @param ctx the parse tree
+ */
+ void exitFunction(EsqlBaseParser.FunctionContext ctx);
/**
* Enter a parse tree produced by the {@code parenthesizedExpression}
* labeled alternative in {@link EsqlBaseParser#primaryExpression}.
@@ -250,14 +262,12 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
*/
void exitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx);
/**
- * Enter a parse tree produced by the {@code functionExpression}
- * labeled alternative in {@link EsqlBaseParser#primaryExpression}.
+ * Enter a parse tree produced by {@link EsqlBaseParser#functionExpression}.
* @param ctx the parse tree
*/
void enterFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx);
/**
- * Exit a parse tree produced by the {@code functionExpression}
- * labeled alternative in {@link EsqlBaseParser#primaryExpression}.
+ * Exit a parse tree produced by {@link EsqlBaseParser#functionExpression}.
* @param ctx the parse tree
*/
void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
index 681de2590d575..35297f3d4f336 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
@@ -145,6 +145,13 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor {
* @return the visitor result
*/
T visitDereference(EsqlBaseParser.DereferenceContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code function}
+ * labeled alternative in {@link EsqlBaseParser#primaryExpression}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitFunction(EsqlBaseParser.FunctionContext ctx);
/**
* Visit a parse tree produced by the {@code parenthesizedExpression}
* labeled alternative in {@link EsqlBaseParser#primaryExpression}.
@@ -153,8 +160,7 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor {
*/
T visitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx);
/**
- * Visit a parse tree produced by the {@code functionExpression}
- * labeled alternative in {@link EsqlBaseParser#primaryExpression}.
+ * Visit a parse tree produced by {@link EsqlBaseParser#functionExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
index aa653d36d141b..a7c8d6dd49cc7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
@@ -20,6 +20,7 @@
import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.RLike;
import org.elasticsearch.xpack.esql.evaluator.predicate.operator.regex.WildcardLike;
import org.elasticsearch.xpack.esql.expression.Order;
+import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod;
@@ -62,6 +63,7 @@
import java.util.function.BiFunction;
import static java.util.Collections.emptyList;
+import static java.util.Collections.singletonList;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.DATE_PERIOD;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.TIME_DURATION;
import static org.elasticsearch.xpack.ql.parser.ParserUtils.source;
@@ -312,12 +314,15 @@ public UnresolvedAttribute visitDereference(EsqlBaseParser.DereferenceContext ct
@Override
public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) {
- return new UnresolvedFunction(
- source(ctx),
- visitIdentifier(ctx.identifier()),
- FunctionResolutionStrategy.DEFAULT,
- ctx.booleanExpression().stream().map(this::expression).toList()
- );
+ String name = visitIdentifier(ctx.identifier());
+ List args = expressions(ctx.booleanExpression());
+ if ("count".equals(EsqlFunctionRegistry.normalizeName(name))) {
+ // to simplify the registration, handle in the parser the special count cases
+ if (args.isEmpty() || ctx.ASTERISK() != null) {
+ args = singletonList(new Literal(source(ctx), "*", DataTypes.KEYWORD));
+ }
+ }
+ return new UnresolvedFunction(source(ctx), name, FunctionResolutionStrategy.DEFAULT, args);
}
@Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java
new file mode 100644
index 0000000000000..8e65e66e3045f
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plan.physical;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.xpack.ql.expression.Attribute;
+import org.elasticsearch.xpack.ql.expression.Expression;
+import org.elasticsearch.xpack.ql.index.EsIndex;
+import org.elasticsearch.xpack.ql.tree.NodeInfo;
+import org.elasticsearch.xpack.ql.tree.NodeUtils;
+import org.elasticsearch.xpack.ql.tree.Source;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Specialized query class for retrieving statistics about the underlying data and not the actual documents.
+ * For that see {@link EsQueryExec}
+ */
+public class EsStatsQueryExec extends LeafExec implements EstimatesRowSize {
+
+ public enum StatsType {
+ COUNT,
+ MIN,
+ MAX,
+ EXISTS;
+ }
+
+ public record Stat(String name, StatsType type) {};
+
+ private final EsIndex index;
+ private final QueryBuilder query;
+ private final Expression limit;
+ private final List attrs;
+ private final List stats;
+
+ public EsStatsQueryExec(
+ Source source,
+ EsIndex index,
+ QueryBuilder query,
+ Expression limit,
+ List attributes,
+ List stats
+ ) {
+ super(source);
+ this.index = index;
+ this.query = query;
+ this.limit = limit;
+ this.attrs = attributes;
+ this.stats = stats;
+ }
+
+ @Override
+ protected NodeInfo info() {
+ return NodeInfo.create(this, EsStatsQueryExec::new, index, query, limit, attrs, stats);
+ }
+
+ public EsIndex index() {
+ return index;
+ }
+
+ public QueryBuilder query() {
+ return query;
+ }
+
+ @Override
+ public List output() {
+ return attrs;
+ }
+
+ public Expression limit() {
+ return limit;
+ }
+
+ @Override
+ // TODO - get the estimation outside the plan so it doesn't touch the plan
+ public PhysicalPlan estimateRowSize(State state) {
+ int size;
+ state.add(false, attrs);
+ size = state.consumeAllFields(false);
+ return this;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(index, query, limit, attrs, stats);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ EsStatsQueryExec other = (EsStatsQueryExec) obj;
+ return Objects.equals(index, other.index)
+ && Objects.equals(attrs, other.attrs)
+ && Objects.equals(query, other.query)
+ && Objects.equals(limit, other.limit)
+ && Objects.equals(stats, other.stats);
+ }
+
+ @Override
+ public String nodeString() {
+ return nodeName()
+ + "["
+ + index
+ + "], stats"
+ + stats
+ + "], query["
+ + (query != null ? Strings.toString(query, false, true) : "")
+ + "]"
+ + NodeUtils.limitedToString(attrs)
+ + ", limit["
+ + (limit != null ? limit.toString() : "")
+ + "], ";
+ }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java
index 0e984b3b85b0b..113e4b91232ae 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java
@@ -18,6 +18,7 @@
import org.elasticsearch.compute.operator.HashAggregationOperator.HashAggregationOperatorFactory;
import org.elasticsearch.compute.operator.Operator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
+import org.elasticsearch.xpack.esql.expression.function.aggregate.Count;
import org.elasticsearch.xpack.esql.plan.physical.AggregateExec;
import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext;
import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation;
@@ -35,7 +36,9 @@
import java.util.Set;
import java.util.function.Consumer;
-abstract class AbstractPhysicalOperationProviders implements PhysicalOperationProviders {
+import static java.util.Collections.emptyList;
+
+public abstract class AbstractPhysicalOperationProviders implements PhysicalOperationProviders {
private final AggregateMapper aggregateMapper = new AggregateMapper();
@@ -235,7 +238,30 @@ private void aggregatesToFactory(
if (mode == AggregateExec.Mode.PARTIAL) {
aggMode = AggregatorMode.INITIAL;
// TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1)
- sourceAttr = List.of(Expressions.attribute(aggregateFunction.field()));
+ Expression field = aggregateFunction.field();
+ // Only count can now support literals - all the other aggs should be optimized away
+ if (field.foldable()) {
+ if (aggregateFunction instanceof Count count) {
+ sourceAttr = emptyList();
+ } else {
+ throw new EsqlIllegalArgumentException(
+ "Does not support yet aggregations over constants - [{}]",
+ aggregateFunction.sourceText()
+ );
+ }
+ } else {
+ Attribute attr = Expressions.attribute(field);
+ // cannot determine attribute
+ if (attr == null) {
+ throw new EsqlIllegalArgumentException(
+ "Cannot work with target field [{}] for agg [{}]",
+ field.sourceText(),
+ aggregateFunction.sourceText()
+ );
+ }
+ sourceAttr = List.of(attr);
+ }
+
} else if (mode == AggregateExec.Mode.FINAL) {
aggMode = AggregatorMode.FINAL;
if (grouping) {
@@ -253,7 +279,9 @@ private void aggregatesToFactory(
}
List inputChannels = sourceAttr.stream().map(attr -> layout.get(attr.id()).channel()).toList();
- assert inputChannels != null && inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0);
+ if (inputChannels.size() > 0) {
+ assert inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0);
+ }
if (aggregateFunction instanceof ToAggregator agg) {
consumer.accept(new AggFunctionSupplierContext(agg.supplier(bigArrays, inputChannels), aggMode));
} else {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java
index ac62c45d4d1f3..ce5e277deaad8 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java
@@ -20,6 +20,8 @@
import org.elasticsearch.compute.operator.Operator;
import org.elasticsearch.compute.operator.OrdinalsGroupingOperator;
import org.elasticsearch.index.mapper.NestedLookup;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.search.NestedHelper;
import org.elasticsearch.logging.LogManager;
@@ -54,6 +56,10 @@ public EsPhysicalOperationProviders(List searchContexts) {
this.searchContexts = searchContexts;
}
+ public List searchContexts() {
+ return searchContexts;
+ }
+
@Override
public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fieldExtractExec, PhysicalOperation source) {
Layout.Builder layout = source.layout.builder();
@@ -85,12 +91,12 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi
return op;
}
- @Override
- public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) {
- final LuceneOperator.Factory luceneFactory;
- Function querySupplier = searchContext -> {
+ public static Function querySupplier(QueryBuilder queryBuilder) {
+ final QueryBuilder qb = queryBuilder == null ? QueryBuilders.matchAllQuery() : queryBuilder;
+
+ return searchContext -> {
SearchExecutionContext ctx = searchContext.getSearchExecutionContext();
- Query query = ctx.toQuery(esQueryExec.query()).query();
+ Query query = ctx.toQuery(qb).query();
NestedLookup nestedLookup = ctx.nestedLookup();
if (nestedLookup != NestedLookup.EMPTY) {
NestedHelper nestedHelper = new NestedHelper(nestedLookup, ctx::isFieldMapped);
@@ -110,6 +116,12 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec,
}
return query;
};
+ }
+
+ @Override
+ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) {
+ Function querySupplier = querySupplier(esQueryExec.query());
+ final LuceneOperator.Factory luceneFactory;
List sorts = esQueryExec.sorts();
List> fieldSorts = null;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index 18fad8cecb014..156b93e1551c4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -7,6 +7,7 @@
package org.elasticsearch.xpack.esql.planner;
+import org.apache.lucene.search.Query;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.compute.Describable;
@@ -15,6 +16,8 @@
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.lucene.DataPartitioning;
+import org.elasticsearch.compute.lucene.LuceneCountOperator;
+import org.elasticsearch.compute.lucene.LuceneOperator;
import org.elasticsearch.compute.operator.ColumnExtractOperator;
import org.elasticsearch.compute.operator.Driver;
import org.elasticsearch.compute.operator.DriverContext;
@@ -43,7 +46,7 @@
import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator;
@@ -54,6 +57,7 @@
import org.elasticsearch.xpack.esql.plan.physical.DissectExec;
import org.elasticsearch.xpack.esql.plan.physical.EnrichExec;
import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
+import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec;
import org.elasticsearch.xpack.esql.plan.physical.EvalExec;
import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec;
import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec;
@@ -96,6 +100,7 @@
import java.util.stream.Stream;
import static java.util.stream.Collectors.joining;
+import static org.elasticsearch.compute.lucene.LuceneOperator.NO_LIMIT;
import static org.elasticsearch.compute.operator.LimitOperator.Factory;
import static org.elasticsearch.compute.operator.ProjectOperator.ProjectOperatorFactory;
@@ -196,6 +201,8 @@ private PhysicalOperation plan(PhysicalPlan node, LocalExecutionPlannerContext c
// source nodes
else if (node instanceof EsQueryExec esQuery) {
return planEsQueryNode(esQuery, context);
+ } else if (node instanceof EsStatsQueryExec statsQuery) {
+ return planEsStats(statsQuery, context);
} else if (node instanceof RowExec row) {
return planRow(row, context);
} else if (node instanceof LocalSourceExec localSource) {
@@ -224,19 +231,33 @@ private PhysicalOperation planAggregation(AggregateExec aggregate, LocalExecutio
return physicalOperationProviders.groupingPhysicalOperation(aggregate, source, context);
}
- private PhysicalOperation planEsQueryNode(EsQueryExec esQuery, LocalExecutionPlannerContext context) {
- if (esQuery.query() == null) {
- esQuery = new EsQueryExec(
- esQuery.source(),
- esQuery.index(),
- esQuery.output(),
- new MatchAllQueryBuilder(),
- esQuery.limit(),
- esQuery.sorts(),
- esQuery.estimatedRowSize()
- );
+ private PhysicalOperation planEsQueryNode(EsQueryExec esQueryExec, LocalExecutionPlannerContext context) {
+ return physicalOperationProviders.sourcePhysicalOperation(esQueryExec, context);
+ }
+
+ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutionPlannerContext context) {
+ if (physicalOperationProviders instanceof EsPhysicalOperationProviders == false) {
+ throw new EsqlIllegalArgumentException("EsStatsQuery should only occur against a Lucene backend");
}
- return physicalOperationProviders.sourcePhysicalOperation(esQuery, context);
+ EsPhysicalOperationProviders esProvider = (EsPhysicalOperationProviders) physicalOperationProviders;
+
+ Function querySupplier = EsPhysicalOperationProviders.querySupplier(statsQuery.query());
+
+ Expression limitExp = statsQuery.limit();
+ int limit = limitExp != null ? (Integer) limitExp.fold() : NO_LIMIT;
+ final LuceneOperator.Factory luceneFactory = new LuceneCountOperator.Factory(
+ esProvider.searchContexts(),
+ querySupplier,
+ context.dataPartitioning(),
+ context.taskConcurrency(),
+ limit
+ );
+
+ Layout.Builder layout = new Layout.Builder();
+ layout.append(statsQuery.outputSet());
+ int instanceCount = Math.max(1, luceneFactory.taskConcurrency());
+ context.driverParallelism(new DriverParallelism(DriverParallelism.Type.DATA_PARALLELISM, instanceCount));
+ return PhysicalOperation.fromSource(luceneFactory, layout.build());
}
private PhysicalOperation planFieldExtractNode(LocalExecutionPlannerContext context, FieldExtractExec fieldExtractExec) {
@@ -318,11 +339,11 @@ private PhysicalOperation planExchange(ExchangeExec exchangeExec, LocalExecution
private PhysicalOperation planExchangeSink(ExchangeSinkExec exchangeSink, LocalExecutionPlannerContext context) {
Objects.requireNonNull(exchangeSinkHandler, "ExchangeSinkHandler wasn't provided");
- PhysicalOperation source = plan(exchangeSink.child(), context);
+ var child = exchangeSink.child();
+ PhysicalOperation source = plan(child, context);
- Function transformer = exchangeSink.child() instanceof AggregateExec
- ? Function.identity()
- : alignPageToAttributes(exchangeSink.output(), source.layout);
+ boolean isAgg = child instanceof AggregateExec || child instanceof EsStatsQueryExec;
+ Function transformer = isAgg ? Function.identity() : alignPageToAttributes(exchangeSink.output(), source.layout);
return source.withSink(new ExchangeSinkOperatorFactory(exchangeSinkHandler::createExchangeSink, transformer), source.layout);
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java
deleted file mode 100644
index 1ca1d5e217f6a..0000000000000
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlBlockFactoryParams.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.esql.plugin;
-
-import org.elasticsearch.common.breaker.CircuitBreaker;
-import org.elasticsearch.common.breaker.NoopCircuitBreaker;
-import org.elasticsearch.common.util.BigArrays;
-import org.elasticsearch.compute.data.BlockFactoryParameters;
-
-/** A provider for sharing the given parameters with the compute engine's block factory. */
-public class EsqlBlockFactoryParams implements BlockFactoryParameters {
-
- static final CircuitBreaker NOOP_BREAKER = new NoopCircuitBreaker("ESQL-noop-breaker");
-
- static CircuitBreaker ESQL_BREAKER;
- static BigArrays ESQL_BIGARRAYS;
-
- static void init(BigArrays bigArrays) {
- ESQL_BREAKER = bigArrays.breakerService().getBreaker("request");
- ESQL_BIGARRAYS = bigArrays;
- }
-
- final CircuitBreaker breaker;
- final BigArrays bigArrays;
-
- public EsqlBlockFactoryParams() {
- this.breaker = ESQL_BREAKER;
- this.bigArrays = ESQL_BIGARRAYS;
- }
-
- @Override
- public CircuitBreaker breaker() {
- return breaker != null ? breaker : NOOP_BREAKER;
- }
-
- @Override
- public BigArrays bigArrays() {
- return bigArrays != null ? bigArrays : BigArrays.NON_RECYCLING_INSTANCE;
- }
-}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
index 1ff00401029cf..b9ab0f7646b96 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
@@ -12,6 +12,7 @@
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
@@ -37,6 +38,7 @@
import java.time.ZoneOffset;
import java.util.List;
import java.util.Locale;
+import java.util.Objects;
import java.util.concurrent.Executor;
public class TransportEsqlQueryAction extends HandledTransportAction {
@@ -69,8 +71,7 @@ public TransportEsqlQueryAction(
this.requestExecutor = threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME);
exchangeService.registerTransportHandler(transportService);
this.exchangeService = exchangeService;
- EsqlBlockFactoryParams.init(bigArrays);
- var blockFactory = BlockFactory.getGlobalInstance();
+ var blockFactory = createBlockFactory(bigArrays);
this.enrichPolicyResolver = new EnrichPolicyResolver(clusterService, transportService, planExecutor.indexResolver());
this.enrichLookupService = new EnrichLookupService(clusterService, searchService, transportService, bigArrays, blockFactory);
this.computeService = new ComputeService(
@@ -85,6 +86,12 @@ public TransportEsqlQueryAction(
this.settings = settings;
}
+ static BlockFactory createBlockFactory(BigArrays bigArrays) {
+ CircuitBreaker circuitBreaker = bigArrays.breakerService().getBreaker("request");
+ Objects.requireNonNull(circuitBreaker, "request circuit breaker wasn't set");
+ return new BlockFactory(circuitBreaker, bigArrays);
+ }
+
@Override
protected void doExecute(Task task, EsqlQueryRequest request, ActionListener listener) {
// workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can
diff --git a/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters b/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters
deleted file mode 100644
index e397954c84cbe..0000000000000
--- a/x-pack/plugin/esql/src/main/resources/META-INF/services/org.elasticsearch.compute.data.BlockFactoryParameters
+++ /dev/null
@@ -1,8 +0,0 @@
-#
-# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
-# or more contributor license agreements. Licensed under the Elastic License
-# 2.0; you may not use this file except in compliance with the Elastic License
-# 2.0.
-#
-
-org.elasticsearch.xpack.esql.plugin.EsqlBlockFactoryParams
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index 8a5b021addae5..a3b63e7e34c37 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -327,7 +327,7 @@ private ActualResults executePlan() throws Exception {
sessionId,
new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()),
bigArrays,
- BlockFactory.getGlobalInstance(),
+ BlockFactory.getNonBreakingInstance(),
configuration,
exchangeSource,
exchangeSink,
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 995eed164b144..6633e5ae3c0fe 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -637,8 +637,9 @@ private static void writeToTempDir(String subdir, String str, String extension)
*/
protected DriverContext driverContext() {
MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1));
- breakers.add(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST));
- return new DriverContext(bigArrays.withCircuitBreaking(), BlockFactory.getGlobalInstance());
+ CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST);
+ breakers.add(breaker);
+ return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays));
}
@After
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
index 0bb43539dba72..e20ba72b82e5c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java
@@ -14,12 +14,14 @@
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.RegexpQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.query.WildcardQueryBuilder;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.core.enrich.EnrichPolicy;
import org.elasticsearch.xpack.esql.EsqlTestUtils;
import org.elasticsearch.xpack.esql.analysis.Analyzer;
@@ -42,6 +44,7 @@
import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec.FieldSort;
import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec;
+import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec;
import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize;
import org.elasticsearch.xpack.esql.plan.physical.EvalExec;
import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec;
@@ -54,6 +57,7 @@
import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.esql.plan.physical.ProjectExec;
import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
+import org.elasticsearch.xpack.esql.planner.FilterTests;
import org.elasticsearch.xpack.esql.planner.Mapper;
import org.elasticsearch.xpack.esql.planner.PhysicalVerificationException;
import org.elasticsearch.xpack.esql.planner.PlannerUtils;
@@ -91,6 +95,7 @@
import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping;
import static org.elasticsearch.xpack.esql.EsqlTestUtils.statsForMissingField;
import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization;
+import static org.elasticsearch.xpack.esql.plan.physical.AggregateExec.Mode.FINAL;
import static org.elasticsearch.xpack.ql.expression.Expressions.name;
import static org.elasticsearch.xpack.ql.expression.Expressions.names;
import static org.elasticsearch.xpack.ql.expression.Order.OrderDirection.ASC;
@@ -103,7 +108,7 @@
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
-//@TestLogging(value = "org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer:TRACE", reason = "debug")
+//@TestLogging(value = "org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer:TRACE", reason = "debug")
public class PhysicalPlanOptimizerTests extends ESTestCase {
private static final String PARAM_FORMATTING = "%1$s";
@@ -1844,7 +1849,7 @@ public void testAvgSurrogateFunctionAfterRenameAndLimit() {
assertThat(limit.limit(), instanceOf(Literal.class));
assertThat(limit.limit().fold(), equalTo(10000));
var aggFinal = as(limit.child(), AggregateExec.class);
- assertThat(aggFinal.getMode(), equalTo(AggregateExec.Mode.FINAL));
+ assertThat(aggFinal.getMode(), equalTo(FINAL));
var aggPartial = as(aggFinal.child(), AggregateExec.class);
assertThat(aggPartial.getMode(), equalTo(AggregateExec.Mode.PARTIAL));
limit = as(aggPartial.child(), LimitExec.class);
@@ -1861,6 +1866,86 @@ public void testAvgSurrogateFunctionAfterRenameAndLimit() {
assertThat(source.limit().fold(), equalTo(10));
}
+ // optimizer doesn't know yet how to push down count over field
+ public void testCountOneFieldWithFilter() {
+ var plan = optimizedPlan(physicalPlan("""
+ from test
+ | where salary > 1000
+ | stats c = count(salary)
+ """));
+ assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
+ }
+
+ // optimizer doesn't know yet how to push down count over field
+ public void testCountOneFieldWithFilterAndLimit() {
+ var plan = optimizedPlan(physicalPlan("""
+ from test
+ | where salary > 1000
+ | limit 10
+ | stats c = count(salary)
+ """));
+ assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
+ }
+
+ // optimizer doesn't know yet how to break down different multi count
+ public void testCountMultipleFieldsWithFilter() {
+ var plan = optimizedPlan(physicalPlan("""
+ from test
+ | where salary > 1000 and emp_no > 10010
+ | stats cs = count(salary), ce = count(emp_no)
+ """));
+ assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
+ }
+
+ public void testCountAllWithFilter() {
+ var plan = optimizedPlan(physicalPlan("""
+ from test
+ | where emp_no > 10010
+ | stats c = count()
+ """));
+
+ var limit = as(plan, LimitExec.class);
+ var agg = as(limit.child(), AggregateExec.class);
+ assertThat(agg.getMode(), is(FINAL));
+ assertThat(Expressions.names(agg.aggregates()), contains("c"));
+ var exchange = as(agg.child(), ExchangeExec.class);
+ var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class);
+ assertThat(esStatsQuery.limit(), is(nullValue()));
+ assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen"));
+ var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no");
+ assertThat(expected.toString(), is(esStatsQuery.query().toString()));
+ }
+
+ @AwaitsFix(bugUrl = "intermediateAgg does proper reduction but the agg itself does not - the optimizer needs to improve")
+ public void testMultiCountAllWithFilter() {
+ var plan = optimizedPlan(physicalPlan("""
+ from test
+ | where emp_no > 10010
+ | stats c = count(), call = count(*), c_literal = count(1)
+ """));
+
+ var limit = as(plan, LimitExec.class);
+ var agg = as(limit.child(), AggregateExec.class);
+ assertThat(agg.getMode(), is(FINAL));
+ assertThat(Expressions.names(agg.aggregates()), contains("c", "call", "c_literal"));
+ var exchange = as(agg.child(), ExchangeExec.class);
+ var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class);
+ assertThat(esStatsQuery.limit(), is(nullValue()));
+ assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen"));
+ var expected = wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no");
+ assertThat(expected.toString(), is(esStatsQuery.query().toString()));
+ }
+
+ // optimizer doesn't know yet how to break down different multi count
+ public void testCountFieldsAndAllWithFilter() {
+ var plan = optimizedPlan(physicalPlan("""
+ from test
+ | where emp_no > 10010
+ | stats c = count(), cs = count(salary), ce = count(emp_no)
+ """));
+ assertThat(plan.anyMatch(EsQueryExec.class::isInstance), is(true));
+ }
+
private static EsQueryExec source(PhysicalPlan plan) {
if (plan instanceof ExchangeExec exchange) {
plan = exchange.child();
@@ -1915,4 +2000,8 @@ private QueryBuilder sv(QueryBuilder builder, String fieldName) {
assertThat(sv.field(), equalTo(fieldName));
return sv.next();
}
+
+ private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName) {
+ return FilterTests.singleValueQuery(inner, fieldName);
+ }
}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java
index f0d4f0534caee..904e2c2ce7e8f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java
@@ -159,7 +159,7 @@ private static FieldAttribute field(String name, DataType type) {
static DriverContext driverContext() {
return new DriverContext(
new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(),
- BlockFactory.getGlobalInstance()
+ BlockFactory.getNonBreakingInstance()
);
}
}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
index 95ef6e7baf70c..f66aa9f47cb8d 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java
@@ -119,7 +119,7 @@ private LocalExecutionPlanner planner() throws IOException {
"test",
null,
BigArrays.NON_RECYCLING_INSTANCE,
- BlockFactory.getGlobalInstance(),
+ BlockFactory.getNonBreakingInstance(),
config(),
null,
null,
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java
index 937488d2ed546..640dd410d8573 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java
@@ -19,6 +19,8 @@
import org.elasticsearch.xpack.esql.plan.logical.Dissect;
import org.elasticsearch.xpack.esql.plan.logical.Grok;
import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
+import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat;
+import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType;
import org.elasticsearch.xpack.esql.plan.physical.OutputExec;
import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
@@ -97,6 +99,10 @@ protected Object pluggableMakeArg(Class extends Node>> toBuildClass, Class
),
IndexResolution.invalid(randomAlphaOfLength(5))
);
+
+ } else if (argClass == Stat.class) {
+ // record field
+ return new Stat(randomRealisticUnicodeOfLength(10), randomFrom(StatsType.values()));
} else if (argClass == Integer.class) {
return randomInt();
}
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
index 0cfa3c43b6b5a..2073d2067445b 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java
@@ -369,7 +369,7 @@ private String loadTemplateSource(String resource, int mappingsVersion) {
@Override
public List> getActions() {
return List.of(
- new ActionHandler<>(GetGlobalCheckpointsAction.INSTANCE, GetGlobalCheckpointsAction.TransportAction.class),
+ new ActionHandler<>(GetGlobalCheckpointsAction.INSTANCE, GetGlobalCheckpointsAction.LocalAction.class),
new ActionHandler<>(GetGlobalCheckpointsShardAction.INSTANCE, GetGlobalCheckpointsShardAction.TransportAction.class),
new ActionHandler<>(GetSecretAction.INSTANCE, TransportGetSecretAction.class),
new ActionHandler<>(PostSecretAction.INSTANCE, TransportPostSecretAction.class),
diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java
index b7856daa8d842..1e3794a4cefe4 100644
--- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java
+++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/action/GetGlobalCheckpointsAction.java
@@ -18,6 +18,7 @@
import org.elasticsearch.action.UnavailableShardsException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.client.internal.node.NodeClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
@@ -26,8 +27,8 @@
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.core.TimeValue;
@@ -54,7 +55,7 @@ public class GetGlobalCheckpointsAction extends ActionType {
+ public static class LocalAction extends TransportAction {
private final ClusterService clusterService;
private final NodeClient client;
@@ -161,7 +163,7 @@ public static class TransportAction extends org.elasticsearch.action.support.Tra
private final ThreadPool threadPool;
@Inject
- public TransportAction(
+ public LocalAction(
final ActionFilters actionFilters,
final TransportService transportService,
final ClusterService clusterService,
diff --git a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java
index b4826e389393c..ec3973a4b5781 100644
--- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java
+++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/saml/sp/SamlServiceProviderIndex.java
@@ -124,7 +124,7 @@ public static final class DocumentSupplier {
public DocumentSupplier(DocumentVersion version, Supplier document) {
this.version = version;
- this.document = new CachedSupplier<>(document);
+ this.document = CachedSupplier.wrap(document);
}
public SamlServiceProviderDocument getDocument() {
diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java
index 411c29255fd78..4a70522f41848 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/MockInferenceServiceIT.java
@@ -7,9 +7,12 @@
package org.elasticsearch.xpack.inference.integration;
+import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.inference.Model;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.inference.ModelConfigurations;
+import org.elasticsearch.inference.ModelSecrets;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
@@ -20,20 +23,34 @@
import org.elasticsearch.xpack.inference.action.GetInferenceModelAction;
import org.elasticsearch.xpack.inference.action.InferenceAction;
import org.elasticsearch.xpack.inference.action.PutInferenceModelAction;
+import org.elasticsearch.xpack.inference.registry.ModelRegistry;
+import org.junit.Before;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+import static org.elasticsearch.xpack.inference.services.MapParsingUtils.removeFromMapOrThrowIfNull;
+import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
public class MockInferenceServiceIT extends ESIntegTestCase {
+ private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
+
+ private ModelRegistry modelRegistry;
+
+ @Before
+ public void createComponents() {
+ modelRegistry = new ModelRegistry(client());
+ }
+
@Override
protected Collection> nodePlugins() {
return List.of(InferencePlugin.class, TestInferenceServicePlugin.class);
@@ -54,15 +71,28 @@ protected Function getClientWrapper() {
public void testMockService() {
String modelId = "test-mock";
- Model putModel = putMockService(modelId, TaskType.SPARSE_EMBEDDING);
- Model readModel = getModel(modelId, TaskType.SPARSE_EMBEDDING);
+ ModelConfigurations putModel = putMockService(modelId, TaskType.SPARSE_EMBEDDING);
+ ModelConfigurations readModel = getModel(modelId, TaskType.SPARSE_EMBEDDING);
assertModelsAreEqual(putModel, readModel);
// The response is randomly generated, the input can be anything
inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, randomAlphaOfLength(10));
}
- private Model putMockService(String modelId, TaskType taskType) {
+ public void testGetUnparsedModelMap_ForTestServiceModel_ReturnsSecretsPopulated() {
+ String modelId = "test-unparsed";
+ putMockService(modelId, TaskType.SPARSE_EMBEDDING);
+
+ var listener = new PlainActionFuture();
+ modelRegistry.getUnparsedModelMap(modelId, listener);
+
+ var modelConfig = listener.actionGet(TIMEOUT);
+ var secretsMap = removeFromMapOrThrowIfNull(modelConfig.secrets(), ModelSecrets.SECRET_SETTINGS);
+ var secrets = TestInferenceServicePlugin.TestSecretSettings.fromMap(secretsMap);
+ assertThat(secrets.apiKey(), is("abc64"));
+ }
+
+ private ModelConfigurations putMockService(String modelId, TaskType taskType) {
String body = """
{
"service": "test_service",
@@ -88,7 +118,6 @@ private Model putMockService(String modelId, TaskType taskType) {
assertThat(response.getModel().getServiceSettings(), instanceOf(TestInferenceServicePlugin.TestServiceSettings.class));
var serviceSettings = (TestInferenceServicePlugin.TestServiceSettings) response.getModel().getServiceSettings();
assertEquals("my_model", serviceSettings.model());
- assertEquals("abc64", serviceSettings.apiKey());
assertThat(response.getModel().getTaskSettings(), instanceOf(TestInferenceServicePlugin.TestTaskSettings.class));
var taskSettings = (TestInferenceServicePlugin.TestTaskSettings) response.getModel().getTaskSettings();
@@ -97,7 +126,7 @@ private Model putMockService(String modelId, TaskType taskType) {
return response.getModel();
}
- public Model getModel(String modelId, TaskType taskType) {
+ public ModelConfigurations getModel(String modelId, TaskType taskType) {
var response = client().execute(GetInferenceModelAction.INSTANCE, new GetInferenceModelAction.Request(modelId, taskType.toString()))
.actionGet();
return response.getModel();
@@ -115,7 +144,7 @@ private void inferOnMockService(String modelId, TaskType taskType, String input)
}
}
- private void assertModelsAreEqual(Model model1, Model model2) {
+ private void assertModelsAreEqual(ModelConfigurations model1, ModelConfigurations model2) {
// The test can't rely on Model::equals as the specific subclass
// may be different. Model loses information about it's implemented
// subtype when it is streamed across the wire.
diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
index a400f84e3c2ec..9f079afaa24e5 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java
@@ -10,6 +10,7 @@
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.inference.TaskSettings;
import org.elasticsearch.inference.TaskType;
@@ -99,13 +100,14 @@ public void testGetModel() throws Exception {
assertThat(exceptionHolder.get(), is(nullValue()));
assertThat(modelHolder.get(), not(nullValue()));
- UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config());
- assertEquals(model.getService(), unparsedModel.service());
+ UnparsedModel unparsedModel = UnparsedModel.unparsedModelFromMap(modelHolder.get().config(), modelHolder.get().secrets());
+ assertEquals(model.getConfigurations().getService(), unparsedModel.service());
ElserMlNodeModel roundTripModel = ElserMlNodeService.parseConfig(
false,
unparsedModel.modelId(),
unparsedModel.taskType(),
- unparsedModel.settings()
+ unparsedModel.settings(),
+ unparsedModel.secrets()
);
assertEquals(model, roundTripModel);
}
@@ -179,17 +181,19 @@ protected void blockingCall(Consumer> function, AtomicRefe
latch.await();
}
- private static ModelWithUnknownField buildModelWithUnknownField(String modelId) {
- return new ModelWithUnknownField(
- modelId,
- TaskType.SPARSE_EMBEDDING,
- ElserMlNodeService.NAME,
- ElserMlNodeServiceSettingsTests.createRandom(),
- ElserMlNodeTaskSettingsTests.createRandom()
+ private static Model buildModelWithUnknownField(String modelId) {
+ return new Model(
+ new ModelWithUnknownField(
+ modelId,
+ TaskType.SPARSE_EMBEDDING,
+ ElserMlNodeService.NAME,
+ ElserMlNodeServiceSettingsTests.createRandom(),
+ ElserMlNodeTaskSettingsTests.createRandom()
+ )
);
}
- private static class ModelWithUnknownField extends Model {
+ private static class ModelWithUnknownField extends ModelConfigurations {
ModelWithUnknownField(
String modelId,
diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java
index b9b0d68054ef5..61837336f291b 100644
--- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java
+++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/TestInferenceServicePlugin.java
@@ -17,6 +17,9 @@
import org.elasticsearch.inference.InferenceResults;
import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
+import org.elasticsearch.inference.ModelSecrets;
+import org.elasticsearch.inference.SecretSettings;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.inference.TaskSettings;
import org.elasticsearch.inference.TaskType;
@@ -45,40 +48,25 @@ public List getInferenceServiceFactories() {
public List getInferenceServiceNamedWriteables() {
return List.of(
new NamedWriteableRegistry.Entry(ServiceSettings.class, TestServiceSettings.NAME, TestServiceSettings::new),
- new NamedWriteableRegistry.Entry(TaskSettings.class, TestTaskSettings.NAME, TestTaskSettings::new)
+ new NamedWriteableRegistry.Entry(TaskSettings.class, TestTaskSettings.NAME, TestTaskSettings::new),
+ new NamedWriteableRegistry.Entry(SecretSettings.class, TestSecretSettings.NAME, TestSecretSettings::new)
);
}
- public class TestInferenceService implements InferenceService {
+ public static class TestInferenceService implements InferenceService {
private static final String NAME = "test_service";
- public static TestServiceModel parseConfig(
- boolean throwOnUnknownFields,
- String modelId,
- TaskType taskType,
- Map settings
- ) {
- Map serviceSettingsMap = removeFromMapOrThrowIfNull(settings, Model.SERVICE_SETTINGS);
- var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap);
-
+ private static Map getTaskSettingsMap(Map settings) {
Map taskSettingsMap;
// task settings are optional
- if (settings.containsKey(Model.TASK_SETTINGS)) {
- taskSettingsMap = removeFromMapOrThrowIfNull(settings, Model.TASK_SETTINGS);
+ if (settings.containsKey(ModelConfigurations.TASK_SETTINGS)) {
+ taskSettingsMap = removeFromMapOrThrowIfNull(settings, ModelConfigurations.TASK_SETTINGS);
} else {
taskSettingsMap = Map.of();
}
- var taskSettings = TestTaskSettings.fromMap(taskSettingsMap);
-
- if (throwOnUnknownFields) {
- throwIfNotEmptyMap(settings, NAME);
- throwIfNotEmptyMap(serviceSettingsMap, NAME);
- throwIfNotEmptyMap(taskSettingsMap, NAME);
- }
-
- return new TestServiceModel(modelId, taskType, NAME, serviceSettings, taskSettings);
+ return taskSettingsMap;
}
public TestInferenceService(InferenceServicePlugin.InferenceServiceFactoryContext context) {
@@ -91,22 +79,47 @@ public String name() {
}
@Override
- public TestServiceModel parseConfigStrict(String modelId, TaskType taskType, Map config) {
- return parseConfig(true, modelId, taskType, config);
+ public TestServiceModel parseRequestConfig(String modelId, TaskType taskType, Map config) {
+ Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
+ var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap);
+ var secretSettings = TestSecretSettings.fromMap(serviceSettingsMap);
+
+ var taskSettingsMap = getTaskSettingsMap(config);
+ var taskSettings = TestTaskSettings.fromMap(taskSettingsMap);
+
+ throwIfNotEmptyMap(config, NAME);
+ throwIfNotEmptyMap(serviceSettingsMap, NAME);
+ throwIfNotEmptyMap(taskSettingsMap, NAME);
+
+ return new TestServiceModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings);
}
@Override
- public TestServiceModel parseConfigLenient(String modelId, TaskType taskType, Map config) {
- return parseConfig(false, modelId, taskType, config);
+ public TestServiceModel parsePersistedConfig(
+ String modelId,
+ TaskType taskType,
+ Map config,
+ Map secrets
+ ) {
+ Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
+ Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS);
+
+ var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap);
+ var secretSettings = TestSecretSettings.fromMap(secretSettingsMap);
+
+ var taskSettingsMap = getTaskSettingsMap(config);
+ var taskSettings = TestTaskSettings.fromMap(taskSettingsMap);
+
+ return new TestServiceModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings);
}
@Override
public void infer(Model model, String input, Map taskSettings, ActionListener listener) {
- switch (model.getTaskType()) {
+ switch (model.getConfigurations().getTaskType()) {
case SPARSE_EMBEDDING -> listener.onResponse(TextExpansionResultsTests.createRandomResults(1, 10));
default -> listener.onFailure(
new ElasticsearchStatusException(
- TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME),
+ TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME),
RestStatus.BAD_REQUEST
)
);
@@ -127,9 +140,10 @@ public TestServiceModel(
TaskType taskType,
String service,
TestServiceSettings serviceSettings,
- TestTaskSettings taskSettings
+ TestTaskSettings taskSettings,
+ TestSecretSettings secretSettings
) {
- super(modelId, taskType, service, serviceSettings, taskSettings);
+ super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings));
}
@Override
@@ -141,9 +155,14 @@ public TestServiceSettings getServiceSettings() {
public TestTaskSettings getTaskSettings() {
return (TestTaskSettings) super.getTaskSettings();
}
+
+ @Override
+ public TestSecretSettings getSecretSettings() {
+ return (TestSecretSettings) super.getSecretSettings();
+ }
}
- public record TestServiceSettings(String model, String apiKey) implements ServiceSettings {
+ public record TestServiceSettings(String model) implements ServiceSettings {
private static final String NAME = "test_service_settings";
@@ -151,31 +170,28 @@ public static TestServiceSettings fromMap(Map map) {
ValidationException validationException = new ValidationException();
String model = MapParsingUtils.removeAsType(map, "model", String.class);
- String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class);
if (model == null) {
- validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("model", Model.SERVICE_SETTINGS));
- }
- if (apiKey == null) {
- validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", Model.SERVICE_SETTINGS));
+ validationException.addValidationError(
+ MapParsingUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS)
+ );
}
if (validationException.validationErrors().isEmpty() == false) {
throw validationException;
}
- return new TestServiceSettings(model, apiKey);
+ return new TestServiceSettings(model);
}
public TestServiceSettings(StreamInput in) throws IOException {
- this(in.readString(), in.readString());
+ this(in.readString());
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("model", model);
- builder.field("api_key", apiKey);
builder.endObject();
return builder;
}
@@ -193,7 +209,6 @@ public TransportVersion getMinimalSupportedVersion() {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(model);
- out.writeString(apiKey);
}
}
@@ -235,4 +250,52 @@ public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests
}
}
+
+ public record TestSecretSettings(String apiKey) implements SecretSettings {
+
+ private static final String NAME = "test_secret_settings";
+
+ public static TestSecretSettings fromMap(Map map) {
+ ValidationException validationException = new ValidationException();
+
+ String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class);
+
+ if (apiKey == null) {
+ validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS));
+ }
+
+ if (validationException.validationErrors().isEmpty() == false) {
+ throw validationException;
+ }
+
+ return new TestSecretSettings(apiKey);
+ }
+
+ public TestSecretSettings(StreamInput in) throws IOException {
+ this(in.readString());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(apiKey);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("api_key", apiKey);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests
+ }
+ }
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
index ba0f1b142a799..f84b841066c01 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java
@@ -130,6 +130,17 @@ public Collection getSystemIndexDescriptors(Settings sett
.setSettings(InferenceIndex.settings())
.setVersionMetaKey("version")
.setOrigin(ClientHelper.INFERENCE_ORIGIN)
+ .build(),
+ SystemIndexDescriptor.builder()
+ .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED)
+ .setIndexPattern(InferenceSecretsIndex.INDEX_PATTERN)
+ .setPrimaryIndex(InferenceSecretsIndex.INDEX_NAME)
+ .setDescription("Contains inference service secrets")
+ .setMappings(InferenceSecretsIndex.mappings())
+ .setSettings(InferenceSecretsIndex.settings())
+ .setVersionMetaKey("version")
+ .setOrigin(ClientHelper.INFERENCE_ORIGIN)
+ .setNetNew()
.build()
);
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceSecretsIndex.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceSecretsIndex.java
new file mode 100644
index 0000000000000..a29682620ed95
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceSecretsIndex.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.indices.SystemIndexDescriptor;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+
+import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
+import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
+
+public class InferenceSecretsIndex {
+
+ private InferenceSecretsIndex() {}
+
+ public static final String INDEX_NAME = ".infer-secrets";
+ public static final String INDEX_PATTERN = INDEX_NAME + "*";
+
+ // Increment this version number when the mappings change
+ private static final int INDEX_MAPPING_VERSION = 1;
+
+ public static Settings settings() {
+ return Settings.builder()
+ .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+ .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
+ .build();
+ }
+
+ /**
+ * Reject any unknown fields being added by setting dynamic mappings to
+ * {@code strict} for the top level object. A document that contains unknown
+ * fields in the document root will be rejected at index time.
+ *
+ * The {@code secrets} object
+     * has dynamic mappings set to {@code false}, which means all fields will
+ * be accepted without throwing an error but those fields are not indexed.
+ *
+ * The reason for mixing {@code strict} and {@code false} dynamic settings
+ * is that {@code secrets} is defined by
+     * the inference services and therefore is not known when creating the
+ * index. However, the top level settings are known in advance and can
+ * be strictly mapped.
+ *
+     * If the top level strict mapping changes then no new documents should
+ * be indexed until the index mappings have been updated, this happens
+ * automatically once all nodes in the cluster are of a compatible version.
+ *
+ * @return The index mappings
+ */
+ public static XContentBuilder mappings() {
+ try {
+ return jsonBuilder().startObject()
+ .startObject(SINGLE_MAPPING_NAME)
+ .startObject("_meta")
+ .field("version", Version.CURRENT)
+ .field(SystemIndexDescriptor.VERSION_META_KEY, INDEX_MAPPING_VERSION)
+ .endObject()
+ .field("dynamic", "strict")
+ .startObject("properties")
+ .startObject("secret_settings")
+ .field("dynamic", "false")
+ .startObject("properties")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+ } catch (IOException e) {
+ throw new UncheckedIOException("Failed to build mappings for index " + INDEX_NAME, e);
+ }
+ }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java
index b6dd41df174e5..03e0f4d8a4543 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/UnparsedModel.java
@@ -8,21 +8,21 @@
package org.elasticsearch.xpack.inference;
import org.elasticsearch.ElasticsearchStatusException;
-import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.rest.RestStatus;
import java.util.Map;
-public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings) {
+public record UnparsedModel(String modelId, TaskType taskType, String service, Map settings, Map secrets) {
- public static UnparsedModel unparsedModelFromMap(Map sourceMap) {
- String modelId = removeStringOrThrowIfNull(sourceMap, Model.MODEL_ID);
- String service = removeStringOrThrowIfNull(sourceMap, Model.SERVICE);
- String taskTypeStr = removeStringOrThrowIfNull(sourceMap, TaskType.NAME);
+ public static UnparsedModel unparsedModelFromMap(Map configMap, Map secretsMap) {
+ String modelId = removeStringOrThrowIfNull(configMap, ModelConfigurations.MODEL_ID);
+ String service = removeStringOrThrowIfNull(configMap, ModelConfigurations.SERVICE);
+ String taskTypeStr = removeStringOrThrowIfNull(configMap, TaskType.NAME);
TaskType taskType = TaskType.fromString(taskTypeStr);
- return new UnparsedModel(modelId, taskType, service, sourceMap);
+ return new UnparsedModel(modelId, taskType, service, configMap, secretsMap);
}
private static String removeStringOrThrowIfNull(Map sourceMap, String fieldName) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java
index 6c59fc89fd152..45b9474cebcdc 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/PutInferenceModelAction.java
@@ -15,7 +15,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
@@ -104,18 +104,18 @@ public int hashCode() {
public static class Response extends ActionResponse implements ToXContentObject {
- private final Model model;
+ private final ModelConfigurations model;
- public Response(Model model) {
+ public Response(ModelConfigurations model) {
this.model = model;
}
public Response(StreamInput in) throws IOException {
super(in);
- model = new Model(in);
+ model = new ModelConfigurations(in);
}
- public Model getModel() {
+ public ModelConfigurations getModel() {
return model;
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java
index 1e208e83985cb..8a8ea81653644 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportGetInferenceModelAction.java
@@ -45,7 +45,7 @@ protected void doExecute(
ActionListener listener
) {
modelRegistry.getUnparsedModelMap(request.getModelId(), ActionListener.wrap(modelConfigMap -> {
- var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config());
+ var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config(), modelConfigMap.secrets());
var service = serviceRegistry.getService(unparsedModel.service());
if (service.isEmpty()) {
listener.onFailure(
@@ -58,8 +58,9 @@ protected void doExecute(
);
return;
}
- var model = service.get().parseConfigLenient(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings());
- listener.onResponse(new PutInferenceModelAction.Response(model));
+ var model = service.get()
+ .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets());
+ listener.onResponse(new PutInferenceModelAction.Response(model.getConfigurations()));
}, listener::onFailure));
}
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
index aab8ed98f4241..386243e43a1a0 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportInferenceAction.java
@@ -42,7 +42,7 @@ public TransportInferenceAction(
protected void doExecute(Task task, InferenceAction.Request request, ActionListener listener) {
ActionListener getModelListener = ActionListener.wrap(modelConfigMap -> {
- var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config());
+ var unparsedModel = UnparsedModel.unparsedModelFromMap(modelConfigMap.config(), modelConfigMap.secrets());
var service = serviceRegistry.getService(unparsedModel.service());
if (service.isEmpty()) {
listener.onFailure(
@@ -68,7 +68,8 @@ protected void doExecute(Task task, InferenceAction.Request request, ActionListe
return;
}
- var model = service.get().parseConfigLenient(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings());
+ var model = service.get()
+ .parsePersistedConfig(unparsedModel.modelId(), unparsedModel.taskType(), unparsedModel.settings(), unparsedModel.secrets());
inferOnService(model, request, service.get(), listener);
}, listener::onFailure);
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java
index 8ab09bafbd248..b0995e5405b2f 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java
@@ -22,6 +22,7 @@
import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.inference.InferenceServiceRegistry;
import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
@@ -74,7 +75,7 @@ protected void masterOperation(
) throws Exception {
var requestAsMap = requestToMap(request);
- String serviceName = (String) requestAsMap.remove(Model.SERVICE);
+ String serviceName = (String) requestAsMap.remove(ModelConfigurations.SERVICE);
if (serviceName == null) {
listener.onFailure(new ElasticsearchStatusException("Model configuration is missing a service", RestStatus.BAD_REQUEST));
return;
@@ -86,7 +87,7 @@ protected void masterOperation(
return;
}
- var model = service.get().parseConfigStrict(request.getModelId(), request.getTaskType(), requestAsMap);
+ var model = service.get().parseRequestConfig(request.getModelId(), request.getTaskType(), requestAsMap);
// model is valid good to persist then start
this.modelRegistry.storeModel(
model,
@@ -97,7 +98,10 @@ protected void masterOperation(
private static void startModel(InferenceService service, Model model, ActionListener listener) {
service.start(
model,
- ActionListener.wrap(ok -> listener.onResponse(new PutInferenceModelAction.Response(model)), listener::onFailure)
+ ActionListener.wrap(
+ ok -> listener.onResponse(new PutInferenceModelAction.Response(model.getConfigurations())),
+ listener::onFailure
+ )
);
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
index 4403ec53e7a13..aec87ed1765d1 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/registry/ModelRegistry.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.inference.registry;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ExceptionsHelper;
@@ -14,6 +16,8 @@
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
@@ -27,19 +31,27 @@
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.inference.Model;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.inference.InferenceIndex;
+import org.elasticsearch.xpack.inference.InferenceSecretsIndex;
import java.io.IOException;
+import java.util.Arrays;
import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.core.Strings.format;
public class ModelRegistry {
- public record ModelConfigMap(Map config) {}
+ public record ModelConfigMap(Map config, Map secrets) {}
+ private static final Logger logger = LogManager.getLogger(ModelRegistry.class);
private final OriginSettingClient client;
public ModelRegistry(Client client) {
@@ -48,46 +60,160 @@ public ModelRegistry(Client client) {
public void getUnparsedModelMap(String modelId, ActionListener listener) {
ActionListener searchListener = ActionListener.wrap(searchResponse -> {
+            // There should be one hit for the configurations and one for the secrets
if (searchResponse.getHits().getHits().length == 0) {
listener.onFailure(new ResourceNotFoundException("Model not found [{}]", modelId));
return;
}
var hits = searchResponse.getHits().getHits();
- assert hits.length == 1;
- listener.onResponse(new ModelConfigMap(hits[0].getSourceAsMap()));
+ listener.onResponse(createModelConfigMap(hits, modelId));
}, listener::onFailure);
QueryBuilder queryBuilder = documentIdQuery(modelId);
- SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN).setQuery(queryBuilder).setSize(1).request();
+ SearchRequest modelSearch = client.prepareSearch(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN)
+ .setQuery(queryBuilder)
+ .setSize(2)
+ .request();
client.search(modelSearch, searchListener);
}
+ private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) {
+ Map mappedHits = Arrays.stream(hits).collect(Collectors.toMap(hit -> {
+ if (hit.getIndex().startsWith(InferenceIndex.INDEX_NAME)) {
+ return InferenceIndex.INDEX_NAME;
+ }
+
+ if (hit.getIndex().startsWith(InferenceSecretsIndex.INDEX_NAME)) {
+ return InferenceSecretsIndex.INDEX_NAME;
+ }
+
+ logger.error(format("Found invalid index for model [%s] at index [%s]", modelId, hit.getIndex()));
+ throw new IllegalArgumentException(
+ format(
+ "Invalid result while loading model [%s] index: [%s]. Try deleting and reinitializing the service",
+ modelId,
+ hit.getIndex()
+ )
+ );
+ }, Function.identity()));
+
+ if (mappedHits.containsKey(InferenceIndex.INDEX_NAME) == false
+ || mappedHits.containsKey(InferenceSecretsIndex.INDEX_NAME) == false
+ || mappedHits.size() > 2) {
+ logger.error(format("Failed to load model [%s], found model parts from index prefixes: [%s]", modelId, mappedHits.keySet()));
+ throw new IllegalStateException(
+ format("Failed to load model, model [%s] is in an invalid state. Try deleting and reinitializing the service", modelId)
+ );
+ }
+
+ return new ModelConfigMap(
+ mappedHits.get(InferenceIndex.INDEX_NAME).getSourceAsMap(),
+ mappedHits.get(InferenceSecretsIndex.INDEX_NAME).getSourceAsMap()
+ );
+ }
+
public void storeModel(Model model, ActionListener<Boolean> listener) {
- IndexRequest request = createIndexRequest(Model.documentId(model.getModelId()), InferenceIndex.INDEX_NAME, model, false);
- request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+ ActionListener<BulkResponse> bulkResponseActionListener = getStoreModelListener(model, listener);
+
+ IndexRequest configRequest = createIndexRequest(
+ Model.documentId(model.getConfigurations().getModelId()),
+ InferenceIndex.INDEX_NAME,
+ model.getConfigurations(),
+ false
+ );
+
+ IndexRequest secretsRequest = createIndexRequest(
+ Model.documentId(model.getConfigurations().getModelId()),
+ InferenceSecretsIndex.INDEX_NAME,
+ model.getSecrets(),
+ false
+ );
+
+ client.prepareBulk()
+ .add(configRequest)
+ .add(secretsRequest)
+ .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+ .execute(bulkResponseActionListener);
+ }
+
+ private static ActionListener<BulkResponse> getStoreModelListener(Model model, ActionListener<Boolean> listener) {
+ return ActionListener.wrap(bulkItemResponses -> {
+ var modelId = model.getConfigurations().getModelId();
+
+ if (bulkItemResponses.getItems().length == 0) {
+ logger.error(format("Storing model [%s] failed, no items were received from the bulk response", modelId));
- client.index(request, ActionListener.wrap(indexResponse -> listener.onResponse(true), e -> {
- if (ExceptionsHelper.unwrapCause(e) instanceof VersionConflictEngineException) {
- listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", model.getModelId()));
- } else {
listener.onFailure(
new ElasticsearchStatusException(
- "Failed to store inference model [{}]",
- RestStatus.INTERNAL_SERVER_ERROR,
- e,
- model.getModelId()
+ format(
+ "Failed to store inference model [%s], invalid bulk response received. Try reinitializing the service",
+ modelId
+ ),
+ RestStatus.INTERNAL_SERVER_ERROR
)
);
+ return;
}
- }));
+
+ BulkItemResponse.Failure failure = getFirstBulkFailure(bulkItemResponses);
+
+ if (failure == null) {
+ listener.onResponse(true);
+ return;
+ }
+
+ logBulkFailures(model.getConfigurations().getModelId(), bulkItemResponses);
+
+ if (ExceptionsHelper.unwrapCause(failure.getCause()) instanceof VersionConflictEngineException) {
+ listener.onFailure(new ResourceAlreadyExistsException("Inference model [{}] already exists", modelId));
+ return;
+ }
+
+ listener.onFailure(
+ new ElasticsearchStatusException(
+ format("Failed to store inference model [%s]", modelId),
+ RestStatus.INTERNAL_SERVER_ERROR,
+ failure.getCause()
+ )
+ );
+ }, e -> {
+ String errorMessage = format("Failed to store inference model [%s]", model.getConfigurations().getModelId());
+ logger.error(errorMessage, e);
+ listener.onFailure(new ElasticsearchStatusException(errorMessage, RestStatus.INTERNAL_SERVER_ERROR, e));
+ });
+ }
+
+ private static void logBulkFailures(String modelId, BulkResponse bulkResponse) {
+ for (BulkItemResponse item : bulkResponse.getItems()) {
+ if (item.isFailed()) {
+ logger.error(
+ format(
+ "Failed to store inference model [%s] index: [%s] bulk failure message [%s]",
+ modelId,
+ item.getIndex(),
+ item.getFailureMessage()
+ )
+ );
+ }
+ }
+ }
+
+ private static BulkItemResponse.Failure getFirstBulkFailure(BulkResponse bulkResponse) {
+ for (BulkItemResponse item : bulkResponse.getItems()) {
+ if (item.isFailed()) {
+ return item.getFailure();
+ }
+ }
+
+ return null;
}
public void deleteModel(String modelId, ActionListener<Boolean> listener) {
DeleteByQueryRequest request = new DeleteByQueryRequest().setAbortOnVersionConflict(false);
- request.indices(InferenceIndex.INDEX_PATTERN);
+ request.indices(InferenceIndex.INDEX_PATTERN, InferenceSecretsIndex.INDEX_PATTERN);
request.setQuery(documentIdQuery(modelId));
request.setRefresh(true);
@@ -106,7 +232,7 @@ private static IndexRequest createIndexRequest(String docId, String indexName, T
return request.opType(operation).id(docId).source(source);
} catch (IOException ex) {
- throw new ElasticsearchException("Unexpected serialization exception for [" + docId + "]", ex);
+ throw new ElasticsearchException(format("Unexpected serialization exception for index [%s] doc [%s]", indexName, docId), ex);
}
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java
index 6c7e36e5d81ee..a317992bc7c40 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java
@@ -8,6 +8,7 @@
package org.elasticsearch.xpack.inference.services.elser;
import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.TaskType;
public class ElserMlNodeModel extends Model {
@@ -19,7 +20,7 @@ public ElserMlNodeModel(
ElserMlNodeServiceSettings serviceSettings,
ElserMlNodeTaskSettings taskSettings
) {
- super(modelId, taskType, service, serviceSettings, taskSettings);
+ super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings));
}
@Override
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
index 45acc467b047b..f8e8584a6a382 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java
@@ -14,6 +14,7 @@
import org.elasticsearch.inference.InferenceResults;
import org.elasticsearch.inference.InferenceService;
import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.plugins.InferenceServicePlugin;
import org.elasticsearch.rest.RestStatus;
@@ -40,15 +41,16 @@ public static ElserMlNodeModel parseConfig(
boolean throwOnUnknownFields,
String modelId,
TaskType taskType,
- Map<String, Object> settings
+ Map<String, Object> settings,
+ Map<String, Object> secrets
) {
- Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(settings, Model.SERVICE_SETTINGS);
+ Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(settings, ModelConfigurations.SERVICE_SETTINGS);
var serviceSettings = serviceSettingsFromMap(serviceSettingsMap);
Map<String, Object> taskSettingsMap;
// task settings are optional
- if (settings.containsKey(Model.TASK_SETTINGS)) {
- taskSettingsMap = removeFromMapOrThrowIfNull(settings, Model.TASK_SETTINGS);
+ if (settings.containsKey(ModelConfigurations.TASK_SETTINGS)) {
+ taskSettingsMap = removeFromMapOrThrowIfNull(settings, ModelConfigurations.TASK_SETTINGS);
} else {
taskSettingsMap = Map.of();
}
@@ -71,31 +73,40 @@ public ElserMlNodeService(InferenceServicePlugin.InferenceServiceFactoryContext
}
@Override
- public ElserMlNodeModel parseConfigStrict(String modelId, TaskType taskType, Map<String, Object> config) {
- return parseConfig(true, modelId, taskType, config);
+ public ElserMlNodeModel parseRequestConfig(String modelId, TaskType taskType, Map<String, Object> config) {
+ return parseConfig(true, modelId, taskType, config, config);
}
@Override
- public ElserMlNodeModel parseConfigLenient(String modelId, TaskType taskType, Map<String, Object> config) {
- return parseConfig(false, modelId, taskType, config);
+ public ElserMlNodeModel parsePersistedConfig(
+ String modelId,
+ TaskType taskType,
+ Map<String, Object> config,
+ Map<String, Object> secrets
+ ) {
+ return parseConfig(false, modelId, taskType, config, secrets);
}
@Override
public void start(Model model, ActionListener<Boolean> listener) {
if (model instanceof ElserMlNodeModel == false) {
- listener.onFailure(new IllegalStateException("Error starting model, [" + model.getModelId() + "] is not an elser model"));
+ listener.onFailure(
+ new IllegalStateException("Error starting model, [" + model.getConfigurations().getModelId() + "] is not an elser model")
+ );
return;
}
- if (model.getTaskType() != TaskType.SPARSE_EMBEDDING) {
- listener.onFailure(new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME)));
+ if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) {
+ listener.onFailure(
+ new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME))
+ );
return;
}
var elserModel = (ElserMlNodeModel) model;
var serviceSettings = elserModel.getServiceSettings();
- var startRequest = new StartTrainedModelDeploymentAction.Request(ELSER_V1_MODEL, model.getModelId());
+ var startRequest = new StartTrainedModelDeploymentAction.Request(ELSER_V1_MODEL, model.getConfigurations().getModelId());
startRequest.setNumberOfAllocations(serviceSettings.getNumAllocations());
startRequest.setThreadsPerAllocation(serviceSettings.getNumThreads());
startRequest.setWaitForState(STARTED);
@@ -111,15 +122,18 @@ public void start(Model model, ActionListener listener) {
public void infer(Model model, String input, Map<String, Object> taskSettings, ActionListener<InferenceResults> listener) {
// No task settings to override with requestTaskSettings
- if (model.getTaskType() != TaskType.SPARSE_EMBEDDING) {
+ if (model.getConfigurations().getTaskType() != TaskType.SPARSE_EMBEDDING) {
listener.onFailure(
- new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME), RestStatus.BAD_REQUEST)
+ new ElasticsearchStatusException(
+ TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME),
+ RestStatus.BAD_REQUEST
+ )
);
return;
}
var request = InferTrainedModelDeploymentAction.Request.forTextInput(
- model.getModelId(),
+ model.getConfigurations().getModelId(),
TextExpansionConfigUpdate.EMPTY_UPDATE,
List.of(input),
TimeValue.timeValueSeconds(10) // TODO get timeout from request
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java
index 1314e6eab4f25..42cb491c76204 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java
@@ -12,7 +12,7 @@
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xpack.inference.services.MapParsingUtils;
@@ -46,13 +46,17 @@ public static ElserMlNodeServiceSettings fromMap(Map map) {
Integer numThreads = MapParsingUtils.removeAsType(map, NUM_THREADS, Integer.class);
if (numAllocations == null) {
- validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, Model.SERVICE_SETTINGS));
+ validationException.addValidationError(
+ MapParsingUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS)
+ );
} else if (numAllocations < 1) {
validationException.addValidationError(mustBeAPositiveNumberError(NUM_ALLOCATIONS, numAllocations));
}
if (numThreads == null) {
- validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, Model.SERVICE_SETTINGS));
+ validationException.addValidationError(
+ MapParsingUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS)
+ );
} else if (numThreads < 1) {
validationException.addValidationError(mustBeAPositiveNumberError(NUM_THREADS, numThreads));
}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java
index c1d84af5b5fbe..c494cab08d8ae 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeTaskSettings.java
@@ -57,7 +57,7 @@ public boolean equals(Object obj) {
@Override
public int hashCode() {
// TODO Class has no members all instances are equivalent
- // Return the hash of NAME to make the serialization tests poss
+ // Return the hash of NAME to make the serialization tests pass
return Objects.hash(NAME);
}
}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java
similarity index 72%
rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java
rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java
index 778f4703767a6..3adfcd29b0f7a 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java
@@ -9,7 +9,7 @@
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.ServiceSettings;
import org.elasticsearch.inference.TaskSettings;
import org.elasticsearch.inference.TaskType;
@@ -17,31 +17,37 @@
import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettingsTests;
import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings;
-public class ModelTests extends AbstractWireSerializingTestCase<Model> {
+public class ModelConfigurationsTests extends AbstractWireSerializingTestCase<ModelConfigurations> {
- public static Model createRandomInstance() {
+ public static ModelConfigurations createRandomInstance() {
// TODO randomise task types and settings
var taskType = TaskType.SPARSE_EMBEDDING;
- return new Model(randomAlphaOfLength(6), taskType, randomAlphaOfLength(6), randomServiceSettings(), randomTaskSettings(taskType));
+ return new ModelConfigurations(
+ randomAlphaOfLength(6),
+ taskType,
+ randomAlphaOfLength(6),
+ randomServiceSettings(),
+ randomTaskSettings(taskType)
+ );
}
- public static Model mutateTestInstance(Model instance) {
+ public static ModelConfigurations mutateTestInstance(ModelConfigurations instance) {
switch (randomIntBetween(0, 2)) {
- case 0 -> new Model(
+ case 0 -> new ModelConfigurations(
instance.getModelId() + "foo",
instance.getTaskType(),
instance.getService(),
instance.getServiceSettings(),
instance.getTaskSettings()
);
- case 1 -> new Model(
+ case 1 -> new ModelConfigurations(
instance.getModelId(),
TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length],
instance.getService(),
instance.getServiceSettings(),
instance.getTaskSettings()
);
- case 2 -> new Model(
+ case 2 -> new ModelConfigurations(
instance.getModelId(),
instance.getTaskType(),
instance.getService() + "bar",
@@ -67,17 +73,17 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() {
}
@Override
- protected Writeable.Reader<Model> instanceReader() {
- return Model::new;
+ protected Writeable.Reader<ModelConfigurations> instanceReader() {
+ return ModelConfigurations::new;
}
@Override
- protected Model createTestInstance() {
+ protected ModelConfigurations createTestInstance() {
return createRandomInstance();
}
@Override
- protected Model mutateInstance(Model instance) {
+ protected ModelConfigurations mutateInstance(ModelConfigurations instance) {
return mutateTestInstance(instance);
}
}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java
new file mode 100644
index 0000000000000..99d5aa0b2f1fa
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelSecretsTests.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.inference.ModelSecrets;
+import org.elasticsearch.inference.SecretSettings;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.List;
+
+public class ModelSecretsTests extends AbstractWireSerializingTestCase<ModelSecrets> {
+
+ public static ModelSecrets createRandomInstance() {
+ return new ModelSecrets(randomSecretSettings());
+ }
+
+ public static ModelSecrets mutateTestInstance(ModelSecrets instance) {
+ return createRandomInstance();
+ }
+
+ private static SecretSettings randomSecretSettings() {
+ return new FakeSecretSettings(randomAlphaOfLengthBetween(1, 10));
+ }
+
+ @Override
+ protected NamedWriteableRegistry getNamedWriteableRegistry() {
+ return new NamedWriteableRegistry(
+ List.of(new NamedWriteableRegistry.Entry(SecretSettings.class, FakeSecretSettings.NAME, FakeSecretSettings::new))
+ );
+ }
+
+ @Override
+ protected Writeable.Reader<ModelSecrets> instanceReader() {
+ return ModelSecrets::new;
+ }
+
+ @Override
+ protected ModelSecrets createTestInstance() {
+ return createRandomInstance();
+ }
+
+ @Override
+ protected ModelSecrets mutateInstance(ModelSecrets instance) {
+ return mutateTestInstance(instance);
+ }
+
+ public record FakeSecretSettings(String apiKey) implements SecretSettings {
+ public static final String API_KEY = "api_key";
+ public static final String NAME = "fake_secret_settings";
+
+ FakeSecretSettings(StreamInput in) throws IOException {
+ this(in.readString());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(apiKey);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(API_KEY, apiKey);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.INFERENCE_MODEL_SECRETS_ADDED;
+ }
+ }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java
index 1e8c05b7b05a8..0a2ad4699cca8 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelResponseTests.java
@@ -11,18 +11,18 @@
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.inference.InferenceNamedWriteablesProvider;
-import org.elasticsearch.xpack.inference.ModelTests;
+import org.elasticsearch.xpack.inference.ModelConfigurationsTests;
public class PutInferenceModelResponseTests extends AbstractWireSerializingTestCase<PutInferenceModelAction.Response> {
@Override
protected PutInferenceModelAction.Response createTestInstance() {
- return new PutInferenceModelAction.Response(ModelTests.createRandomInstance());
+ return new PutInferenceModelAction.Response(ModelConfigurationsTests.createRandomInstance());
}
@Override
protected PutInferenceModelAction.Response mutateInstance(PutInferenceModelAction.Response instance) {
- var mutatedModel = ModelTests.mutateTestInstance(instance.getModel());
+ var mutatedModel = ModelConfigurationsTests.mutateTestInstance(instance.getModel());
return new PutInferenceModelAction.Response(mutatedModel);
}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java
new file mode 100644
index 0000000000000..43928da8ed3b3
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/model/TestModel.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.model;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
+import org.elasticsearch.inference.ModelSecrets;
+import org.elasticsearch.inference.SecretSettings;
+import org.elasticsearch.inference.ServiceSettings;
+import org.elasticsearch.inference.TaskSettings;
+import org.elasticsearch.inference.TaskType;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.inference.services.MapParsingUtils;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
+import static org.elasticsearch.test.ESTestCase.randomInt;
+
+public class TestModel extends Model {
+
+ public static TestModel createRandomInstance() {
+ return new TestModel(
+ randomAlphaOfLength(4),
+ TaskType.TEXT_EMBEDDING,
+ randomAlphaOfLength(10),
+ new TestModel.TestServiceSettings(randomAlphaOfLength(4)),
+ new TestModel.TestTaskSettings(randomInt(3)),
+ new TestModel.TestSecretSettings(randomAlphaOfLength(4))
+ );
+ }
+
+ public TestModel(
+ String modelId,
+ TaskType taskType,
+ String service,
+ TestServiceSettings serviceSettings,
+ TestTaskSettings taskSettings,
+ TestSecretSettings secretSettings
+ ) {
+ super(new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), new ModelSecrets(secretSettings));
+ }
+
+ @Override
+ public TestServiceSettings getServiceSettings() {
+ return (TestServiceSettings) super.getServiceSettings();
+ }
+
+ @Override
+ public TestTaskSettings getTaskSettings() {
+ return (TestTaskSettings) super.getTaskSettings();
+ }
+
+ @Override
+ public TestSecretSettings getSecretSettings() {
+ return (TestSecretSettings) super.getSecretSettings();
+ }
+
+ public record TestServiceSettings(String model) implements ServiceSettings {
+
+ private static final String NAME = "test_service_settings";
+
+ public static TestServiceSettings fromMap(Map<String, Object> map) {
+ ValidationException validationException = new ValidationException();
+
+ String model = MapParsingUtils.removeAsType(map, "model", String.class);
+
+ if (model == null) {
+ validationException.addValidationError(
+ MapParsingUtils.missingSettingErrorMsg("model", ModelConfigurations.SERVICE_SETTINGS)
+ );
+ }
+
+ if (validationException.validationErrors().isEmpty() == false) {
+ throw validationException;
+ }
+
+ return new TestServiceSettings(model);
+ }
+
+ public TestServiceSettings(StreamInput in) throws IOException {
+ this(in.readString());
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("model", model);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(model);
+ }
+ }
+
+ public record TestTaskSettings(Integer temperature) implements TaskSettings {
+
+ private static final String NAME = "test_task_settings";
+
+ public static TestTaskSettings fromMap(Map<String, Object> map) {
+ Integer temperature = MapParsingUtils.removeAsType(map, "temperature", Integer.class);
+ return new TestTaskSettings(temperature);
+ }
+
+ public TestTaskSettings(StreamInput in) throws IOException {
+ this(in.readOptionalVInt());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeOptionalVInt(temperature);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ if (temperature != null) {
+ builder.field("temperature", temperature);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests
+ }
+ }
+
+ public record TestSecretSettings(String apiKey) implements SecretSettings {
+
+ private static final String NAME = "test_secret_settings";
+
+ public static TestSecretSettings fromMap(Map<String, Object> map) {
+ ValidationException validationException = new ValidationException();
+
+ String apiKey = MapParsingUtils.removeAsType(map, "api_key", String.class);
+
+ if (apiKey == null) {
+ validationException.addValidationError(MapParsingUtils.missingSettingErrorMsg("api_key", ModelSecrets.SECRET_SETTINGS));
+ }
+
+ if (validationException.validationErrors().isEmpty() == false) {
+ throw validationException;
+ }
+
+ return new TestSecretSettings(apiKey);
+ }
+
+ public TestSecretSettings(StreamInput in) throws IOException {
+ this(in.readString());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ out.writeString(apiKey);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field("api_key", apiKey);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests
+ }
+ }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java
new file mode 100644
index 0000000000000..bad5c88067669
--- /dev/null
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/registry/ModelRegistryTests.java
@@ -0,0 +1,272 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.registry;
+
+import org.apache.lucene.search.TotalHits;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.ResourceAlreadyExistsException;
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.index.engine.VersionConflictEngineException;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.inference.model.TestModel;
+import org.junit.After;
+import org.junit.Before;
+
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.elasticsearch.core.Strings.format;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class ModelRegistryTests extends ESTestCase {
+
+ private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS);
+
+ private ThreadPool threadPool;
+
+ @Before
+ public void setUpThreadPool() {
+ threadPool = new TestThreadPool(getTestName());
+ }
+
+ @After
+ public void tearDownThreadPool() {
+ terminate(threadPool);
+ }
+
+ public void testGetUnparsedModelMap_ThrowsResourceNotFound_WhenNoHitsReturned() {
+ var client = mockClient();
+ mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[0]));
+
+ var registry = new ModelRegistry(client);
+
+ var listener = new PlainActionFuture<ModelRegistry.ModelConfigMap>();
+ registry.getUnparsedModelMap("1", listener);
+
+ ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(exception.getMessage(), is("Model not found [1]"));
+ }
+
+ public void testGetUnparsedModelMap_ThrowsIllegalArgumentException_WhenInvalidIndexReceived() {
+ var client = mockClient();
+ var unknownIndexHit = SearchHit.createFromMap(Map.of("_index", "unknown_index"));
+ mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { unknownIndexHit }));
+
+ var registry = new ModelRegistry(client);
+
+ var listener = new PlainActionFuture<ModelRegistry.ModelConfigMap>();
+ registry.getUnparsedModelMap("1", listener);
+
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(
+ exception.getMessage(),
+ is("Invalid result while loading model [1] index: [unknown_index]. Try deleting and reinitializing the service")
+ );
+ }
+
+ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceEntry() {
+ var client = mockClient();
+ var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".infer-secrets"));
+ mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceSecretsHit }));
+
+ var registry = new ModelRegistry(client);
+
+ var listener = new PlainActionFuture<ModelRegistry.ModelConfigMap>();
+ registry.getUnparsedModelMap("1", listener);
+
+ IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(
+ exception.getMessage(),
+ is("Failed to load model, model [1] is in an invalid state. Try deleting and reinitializing the service")
+ );
+ }
+
+ public void testGetUnparsedModelMap_ThrowsIllegalStateException_WhenUnableToFindInferenceSecretsEntry() {
+ var client = mockClient();
+ var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference"));
+ mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit }));
+
+ var registry = new ModelRegistry(client);
+
+ var listener = new PlainActionFuture<ModelRegistry.ModelConfigMap>();
+ registry.getUnparsedModelMap("1", listener);
+
+ IllegalStateException exception = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(
+ exception.getMessage(),
+ is("Failed to load model, model [1] is in an invalid state. Try deleting and reinitializing the service")
+ );
+ }
+
+ public void testGetUnparsedModelMap_ReturnsModelConfigMap_WhenBothInferenceAndSecretsHitsAreFound() {
+ var client = mockClient();
+ var inferenceHit = SearchHit.createFromMap(Map.of("_index", ".inference"));
+ var inferenceSecretsHit = SearchHit.createFromMap(Map.of("_index", ".infer-secrets"));
+
+ mockClientExecuteSearch(client, mockSearchResponse(new SearchHit[] { inferenceHit, inferenceSecretsHit }));
+
+ var registry = new ModelRegistry(client);
+
+ var listener = new PlainActionFuture<ModelRegistry.ModelConfigMap>();
+ registry.getUnparsedModelMap("1", listener);
+
+ var modelConfig = listener.actionGet(TIMEOUT);
+ assertThat(modelConfig.config(), nullValue());
+ assertThat(modelConfig.secrets(), nullValue());
+ }
+
+ public void testStoreModel_ReturnsTrue_WhenNoFailuresOccur() {
+ var client = mockBulkClient();
+
+ var bulkItem = mock(BulkItemResponse.class);
+ when(bulkItem.isFailed()).thenReturn(false);
+ var bulkResponse = mock(BulkResponse.class);
+ when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { bulkItem });
+
+ mockClientExecuteBulk(client, bulkResponse);
+
+ var model = TestModel.createRandomInstance();
+ var registry = new ModelRegistry(client);
+ var listener = new PlainActionFuture<Boolean>();
+
+ registry.storeModel(model, listener);
+
+ assertTrue(listener.actionGet(TIMEOUT));
+ }
+
+ public void testStoreModel_ThrowsException_WhenBulkResponseIsEmpty() {
+ var client = mockBulkClient();
+
+ var bulkResponse = mock(BulkResponse.class);
+ when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[0]);
+
+ mockClientExecuteBulk(client, bulkResponse);
+
+ var model = TestModel.createRandomInstance();
+ var registry = new ModelRegistry(client);
+ var listener = new PlainActionFuture<Boolean>();
+
+ registry.storeModel(model, listener);
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(
+ exception.getMessage(),
+ is(
+ format(
+ "Failed to store inference model [%s], invalid bulk response received. Try reinitializing the service",
+ model.getConfigurations().getModelId()
+ )
+ )
+ );
+ }
+
+ public void testStoreModel_ThrowsResourceAlreadyExistsException_WhenFailureIsAVersionConflict() {
+ var client = mockBulkClient();
+
+ var bulkItem = mock(BulkItemResponse.class);
+ when(bulkItem.isFailed()).thenReturn(true);
+
+ var failure = new BulkItemResponse.Failure("index", "id", mock(VersionConflictEngineException.class));
+ when(bulkItem.getFailure()).thenReturn(failure);
+ var bulkResponse = mock(BulkResponse.class);
+ when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { bulkItem });
+
+ mockClientExecuteBulk(client, bulkResponse);
+
+ var model = TestModel.createRandomInstance();
+ var registry = new ModelRegistry(client);
+ var listener = new PlainActionFuture<Boolean>();
+
+ registry.storeModel(model, listener);
+
+ ResourceAlreadyExistsException exception = expectThrows(ResourceAlreadyExistsException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(exception.getMessage(), is(format("Inference model [%s] already exists", model.getConfigurations().getModelId())));
+ }
+
+ public void testStoreModel_ThrowsException_WhenFailureIsNotAVersionConflict() {
+ var client = mockBulkClient();
+
+ var bulkItem = mock(BulkItemResponse.class);
+ when(bulkItem.isFailed()).thenReturn(true);
+
+ var failure = new BulkItemResponse.Failure("index", "id", mock(IllegalStateException.class));
+ when(bulkItem.getFailure()).thenReturn(failure);
+ var bulkResponse = mock(BulkResponse.class);
+ when(bulkResponse.getItems()).thenReturn(new BulkItemResponse[] { bulkItem });
+
+ mockClientExecuteBulk(client, bulkResponse);
+
+ var model = TestModel.createRandomInstance();
+ var registry = new ModelRegistry(client);
+ var listener = new PlainActionFuture<Boolean>();
+
+ registry.storeModel(model, listener);
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
+ assertThat(exception.getMessage(), is(format("Failed to store inference model [%s]", model.getConfigurations().getModelId())));
+ }
+
+ private Client mockBulkClient() {
+ var client = mockClient();
+ when(client.prepareBulk()).thenReturn(new BulkRequestBuilder(client, BulkAction.INSTANCE));
+
+ return client;
+ }
+
+ private Client mockClient() {
+ var client = mock(Client.class);
+ when(client.threadPool()).thenReturn(threadPool);
+
+ return client;
+ }
+
+ private static void mockClientExecuteSearch(Client client, SearchResponse searchResponse) {
+ doAnswer(invocationOnMock -> {
+ @SuppressWarnings("unchecked")
+ ActionListener<SearchResponse> actionListener = (ActionListener<SearchResponse>) invocationOnMock.getArguments()[2];
+ actionListener.onResponse(searchResponse);
+ return Void.TYPE;
+ }).when(client).execute(any(), any(), any());
+ }
+
+ private static void mockClientExecuteBulk(Client client, BulkResponse bulkResponse) {
+ doAnswer(invocationOnMock -> {
+ @SuppressWarnings("unchecked")
+ ActionListener<BulkResponse> actionListener = (ActionListener<BulkResponse>) invocationOnMock.getArguments()[2];
+ actionListener.onResponse(bulkResponse);
+ return Void.TYPE;
+ }).when(client).execute(any(), any(), any());
+ }
+
+ private static SearchResponse mockSearchResponse(SearchHit[] hits) {
+ SearchHits searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 1);
+
+ var searchResponse = mock(SearchResponse.class);
+ when(searchResponse.getHits()).thenReturn(searchHits);
+
+ return searchResponse;
+ }
+}
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java
index 0449c1b4a7d59..1ab580eec358b 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java
@@ -10,10 +10,12 @@
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.ModelConfigurations;
import org.elasticsearch.inference.TaskType;
import org.elasticsearch.plugins.InferenceServicePlugin;
import org.elasticsearch.test.ESTestCase;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -40,12 +42,12 @@ public void testParseConfigStrict() {
var settings = new HashMap<String, Object>();
settings.put(
- Model.SERVICE_SETTINGS,
+ ModelConfigurations.SERVICE_SETTINGS,
new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4))
);
- settings.put(Model.TASK_SETTINGS, Map.of());
+ settings.put(ModelConfigurations.TASK_SETTINGS, Map.of());
- ElserMlNodeModel parsedModel = service.parseConfigStrict("foo", TaskType.SPARSE_EMBEDDING, settings);
+ ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings);
assertEquals(
new ElserMlNodeModel(
@@ -64,11 +66,11 @@ public void testParseConfigStrictWithNoTaskSettings() {
var settings = new HashMap<String, Object>();
settings.put(
- Model.SERVICE_SETTINGS,
+ ModelConfigurations.SERVICE_SETTINGS,
new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4))
);
- ElserMlNodeModel parsedModel = service.parseConfigStrict("foo", TaskType.SPARSE_EMBEDDING, settings);
+ ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings);
assertEquals(
new ElserMlNodeModel(
@@ -88,52 +90,76 @@ public void testParseConfigStrictWithUnknownSettings() {
{
var settings = new HashMap<String, Object>();
settings.put(
- Model.SERVICE_SETTINGS,
+ ModelConfigurations.SERVICE_SETTINGS,
new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4))
);
- settings.put(Model.TASK_SETTINGS, Map.of());
+ settings.put(ModelConfigurations.TASK_SETTINGS, Map.of());
settings.put("foo", "bar");
if (throwOnUnknown) {
var e = expectThrows(
ElasticsearchStatusException.class,
- () -> ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings)
+ () -> ElserMlNodeService.parseConfig(
+ throwOnUnknown,
+ "foo",
+ TaskType.SPARSE_EMBEDDING,
+ settings,
+ Collections.emptyMap()
+ )
);
assertThat(
e.getMessage(),
containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser_mlnode] service")
);
} else {
- var parsed = ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings);
+ var parsed = ElserMlNodeService.parseConfig(
+ throwOnUnknown,
+ "foo",
+ TaskType.SPARSE_EMBEDDING,
+ settings,
+ Collections.emptyMap()
+ );
}
}
{
var settings = new HashMap<String, Object>();
settings.put(
- Model.SERVICE_SETTINGS,
+ ModelConfigurations.SERVICE_SETTINGS,
new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4))
);
- settings.put(Model.TASK_SETTINGS, Map.of("foo", "bar"));
+ settings.put(ModelConfigurations.TASK_SETTINGS, Map.of("foo", "bar"));
if (throwOnUnknown) {
var e = expectThrows(
ElasticsearchStatusException.class,
- () -> ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings)
+ () -> ElserMlNodeService.parseConfig(
+ throwOnUnknown,
+ "foo",
+ TaskType.SPARSE_EMBEDDING,
+ settings,
+ Collections.emptyMap()
+ )
);
assertThat(
e.getMessage(),
containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser_mlnode] service")
);
} else {
- var parsed = ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings);
+ var parsed = ElserMlNodeService.parseConfig(
+ throwOnUnknown,
+ "foo",
+ TaskType.SPARSE_EMBEDDING,
+ settings,
+ Collections.emptyMap()
+ );
}
}
{
var settings = new HashMap<String, Object>();
settings.put(
- Model.SERVICE_SETTINGS,
+ ModelConfigurations.SERVICE_SETTINGS,
new HashMap<>(
Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4, "foo", "bar")
)
@@ -142,14 +168,26 @@ public void testParseConfigStrictWithUnknownSettings() {
if (throwOnUnknown) {
var e = expectThrows(
ElasticsearchStatusException.class,
- () -> ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings)
+ () -> ElserMlNodeService.parseConfig(
+ throwOnUnknown,
+ "foo",
+ TaskType.SPARSE_EMBEDDING,
+ settings,
+ Collections.emptyMap()
+ )
);
assertThat(
e.getMessage(),
containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser_mlnode] service")
);
} else {
- var parsed = ElserMlNodeService.parseConfig(throwOnUnknown, "foo", TaskType.SPARSE_EMBEDDING, settings);
+ var parsed = ElserMlNodeService.parseConfig(
+ throwOnUnknown,
+ "foo",
+ TaskType.SPARSE_EMBEDDING,
+ settings,
+ Collections.emptyMap()
+ );
}
}
}
diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle
index de4857860f561..0b3dda1e365ed 100644
--- a/x-pack/plugin/ml/build.gradle
+++ b/x-pack/plugin/ml/build.gradle
@@ -1,3 +1,5 @@
+import org.elasticsearch.gradle.internal.info.BuildParams
+
apply plugin: 'elasticsearch.internal-es-plugin'
apply plugin: 'elasticsearch.internal-cluster-test'
apply plugin: 'elasticsearch.internal-test-artifact'
@@ -78,15 +80,20 @@ dependencies {
api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}"
api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}"
implementation 'org.ojalgo:ojalgo:51.2.0'
- nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:deps@zip") {
+ nativeBundle("org.elasticsearch.ml:ml-cpp:${mlCppVersion()}:deps@zip") {
changing = true
}
- nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}:nodeps@zip") {
+ nativeBundle("org.elasticsearch.ml:ml-cpp:${mlCppVersion()}:nodeps@zip") {
changing = true
}
testImplementation 'org.ini4j:ini4j:0.5.2'
}
+def mlCppVersion() {
+ return (project.gradle.parent != null && BuildParams.isSnapshotBuild() == false) ?
+ (project.version + "-SNAPSHOT") : project.version;
+}
+
artifacts {
// normal es plugins do not publish the jar but we need to since users need it for extensions
archives tasks.named("jar")
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java
index 2cfc330533109..f93988681f605 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java
@@ -86,7 +86,7 @@ public class DataFrameDataExtractor {
context.extractedFields.getAllFields().forEach(f -> this.extractedFieldsByName.put(f.getName(), f));
hasNext = true;
hasPreviousSearchFailed = false;
- this.trainTestSplitter = new CachedSupplier<>(context.trainTestSplitterFactory::create);
+ this.trainTestSplitter = CachedSupplier.wrap(context.trainTestSplitterFactory::create);
}
public Map<String, String> getHeaders() {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java
index c7a6c1fbd498d..d89202909da27 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java
@@ -969,7 +969,7 @@ static final class AuthorizedIndices implements AuthorizationEngine.AuthorizedIn
private final Predicate<String> isAuthorizedPredicate;
AuthorizedIndices(Supplier<Set<String>> allAuthorizedAndAvailableSupplier, Predicate<String> isAuthorizedPredicate) {
- this.allAuthorizedAndAvailableSupplier = new CachedSupplier<>(allAuthorizedAndAvailableSupplier);
+ this.allAuthorizedAndAvailableSupplier = CachedSupplier.wrap(allAuthorizedAndAvailableSupplier);
this.isAuthorizedPredicate = Objects.requireNonNull(isAuthorizedPredicate);
}
diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java
index 37994b4cf8d66..b38772ee2cc5e 100644
--- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java
+++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotRetentionTask.java
@@ -187,7 +187,7 @@ static Map getAllPoliciesWithRetentionEnabled(f
static boolean snapshotEligibleForDeletion(
SnapshotInfo snapshot,
- Map<String, List<SnapshotInfo>> allSnapshots,
+ Map<String, Map<String, Map<SnapshotId, RepositoryData.SnapshotDetails>>> allSnapshotDetails,
Map<String, SnapshotLifecyclePolicy> policies
) {
assert snapshot.userMetadata() != null
@@ -209,17 +209,12 @@ static boolean snapshotEligibleForDeletion(
final String repository = policy.getRepository();
// Retrieve the predicate based on the retention policy, passing in snapshots pertaining only to *this* policy and repository
+ final var relevantSnapshots = allSnapshotDetails.getOrDefault(repository, Map.of()).getOrDefault(policyId, Map.of());
+ assert relevantSnapshots.containsKey(snapshot.snapshotId());
boolean eligible = retention.isSnapshotEligibleForDeletion(
- snapshot,
- allSnapshots.get(repository)
- .stream()
- .filter(
- info -> Optional.ofNullable(info.userMetadata())
- .map(meta -> meta.get(POLICY_ID_METADATA_FIELD))
- .map(pId -> pId.equals(policyId))
- .orElse(false)
- )
- .collect(Collectors.toMap(SnapshotInfo::snapshotId, RepositoryData.SnapshotDetails::fromSnapshotInfo))
+ snapshot.snapshotId(),
+ RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshot),
+ relevantSnapshots
);
logger.debug(
"[{}] testing snapshot [{}] deletion eligibility: {}",
@@ -290,6 +285,24 @@ void getSnapshotsEligibleForDeletion(
);
}
+ // Repository name -> Retention policy ID -> (SnapshotId, SnapshotDetails)
+ final Map<String, Map<String, Map<SnapshotId, RepositoryData.SnapshotDetails>>> allSnapshotDetails = new HashMap<>();
+ // TODO should we make this properly immutable or is its scope small enough that we don't need it?
+ for (Map.Entry<String, List<SnapshotInfo>> repositorySnapshots : snapshots.entrySet()) {
+ final var repositoryName = repositorySnapshots.getKey();
+ final var repositorySnapshotDetails = allSnapshotDetails.computeIfAbsent(repositoryName, ignored -> new HashMap<>());
+ for (SnapshotInfo snapshotInfo : repositorySnapshots.getValue()) {
+ final var snapshotId = snapshotInfo.snapshotId();
+ final var snapshotDetails = RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshotInfo);
+ final var slmPolicy = snapshotDetails.getSlmPolicy();
+ if (Strings.hasText(slmPolicy)) {
+ final var previousDetails = repositorySnapshotDetails.computeIfAbsent(slmPolicy, ignored -> new HashMap<>())
+ .put(snapshotId, snapshotDetails);
+ assert previousDetails == null : previousDetails;
+ }
+ }
+ }
+
// Find all the snapshots that are past their retention date
final Map<String, List<Tuple<SnapshotId, String>>> snapshotsToBeDeleted = snapshots.entrySet()
.stream()
@@ -298,7 +311,7 @@ void getSnapshotsEligibleForDeletion(
Map.Entry::getKey,
e -> e.getValue()
.stream()
- .filter(snapshot -> snapshotEligibleForDeletion(snapshot, snapshots, policies))
+ .filter(snapshot -> snapshotEligibleForDeletion(snapshot, allSnapshotDetails, policies))
// SnapshotInfo instances can be quite large in case they contain e.g. a large collection of
// exceptions so we extract the only two things (id + policy id) here so they can be GCed
.map(snapshotInfo -> Tuple.tuple(snapshotInfo.snapshotId(), getPolicyId(snapshotInfo)))
diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java
index b120b49c63654..90488c33edba2 100644
--- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java
+++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SnapshotRetentionTaskTests.java
@@ -27,6 +27,7 @@
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.Tuple;
+import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.SnapshotInfo;
@@ -133,10 +134,10 @@ public void testSnapshotEligibleForDeletion() {
new SnapshotRetentionConfiguration(TimeValue.timeValueDays(30), null, null)
);
Map policyMap = Collections.singletonMap("policy", policy);
- Function<SnapshotInfo, Map<String, List<SnapshotInfo>>> mkInfos = i -> Collections.singletonMap(
- repoName,
- Collections.singletonList(i)
- );
+ Function<SnapshotInfo, Map<String, Map<String, Map<SnapshotId, RepositoryData.SnapshotDetails>>>> mkInfos = snapshotInfo -> {
+ final var snapshotDetails = RepositoryData.SnapshotDetails.fromSnapshotInfo(snapshotInfo);
+ return Map.of(repoName, Map.of(snapshotDetails.getSlmPolicy(), Map.of(snapshotInfo.snapshotId(), snapshotDetails)));
+ };
// Test with an ancient snapshot that should be expunged
SnapshotInfo info = new SnapshotInfo(