diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile
index 5608747b6..6d44e2211 100644
--- a/.buildkite/Dockerfile
+++ b/.buildkite/Dockerfile
@@ -10,5 +10,7 @@ RUN apt-get clean -y && \
 WORKDIR /usr/src/app
 
-COPY . .
+COPY package.json .
 RUN npm install --production=false
+
+COPY . .
diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index d05129234..1dca14548 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -6,14 +6,13 @@ steps:
       env:
         NODE_VERSION: "{{ matrix.nodejs }}"
         TEST_SUITE: "{{ matrix.suite }}"
-        STACK_VERSION: 8.8.0-SNAPSHOT
+        STACK_VERSION: 8.12.0-SNAPSHOT
       matrix:
         setup:
           suite:
             - "free"
             - "platinum"
           nodejs:
-            - "16"
            - "18"
            - "20"
      command: ./.buildkite/run-tests.sh
diff --git a/.ci/Dockerfile b/.ci/Dockerfile
index 1f10aed8c..1f871d9f2 100644
--- a/.ci/Dockerfile
+++ b/.ci/Dockerfile
@@ -1,4 +1,4 @@
-ARG NODE_JS_VERSION=18
+ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
 FROM node:${NODE_JS_VERSION}
 
 ARG BUILDER_UID=1000
@@ -12,15 +12,19 @@ RUN apt-get clean -y && \
   apt-get install -y zip
 
 # Set user permissions and directory
-RUN groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP} \
-    && useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GROUP} -m elastic 1>/dev/null 2>/dev/null \
+RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
+    && (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
     && mkdir -p /usr/src/elasticsearch-js \
-    && chown -R ${BUILDER_USER}:${BUILDER_GROUP} /usr/src/
+    && chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/
+
 WORKDIR /usr/src/elasticsearch-js
-USER ${BUILDER_USER}:${BUILDER_GROUP}
 
-# Install app dependencies
-COPY --chown=$BUILDER_USER:$BUILDER_GROUP package*.json ./
-RUN npm install
+# run remainder of commands as non-root user
+USER ${BUILDER_UID}:${BUILDER_GID}
+
+# install dependencies
+COPY package.json .
+RUN npm install --production=false
 
-COPY --chown=$BUILDER_USER:$BUILDER_GROUP . .
+# copy project files
+COPY . .
diff --git a/.ci/make.mjs b/.ci/make.mjs
index 305f066e2..511944972 100644
--- a/.ci/make.mjs
+++ b/.ci/make.mjs
@@ -28,6 +28,11 @@ import assert from 'assert'
 import { join } from 'desm'
 import semver from 'semver'
 
+// xz/globals loads minimist-parsed args as a global `argv`, but it
+// interprets args like '8.10' as numbers and shortens them to '8.1',
+// so we have to import and configure minimist ourselves.
+import minimist from 'minimist'
+const argv = minimist(process.argv.slice(2), { string: ['_', 'task'] })
 assert(typeof argv.task === 'string', 'Missing task parameter')
 
 switch (argv.task) {
@@ -86,10 +91,10 @@ async function bump (args) {
     'utf8'
   )
 
-  const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'))
+  const pipeline = await readFile(join(import.meta.url, '..', '.buildkite', 'pipeline.yml'), 'utf8')
   await writeFile(
     join(import.meta.url, '..', '.buildkite', 'pipeline.yml'),
-    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}-SNAPSHOT`), // eslint-disable-line
+    pipeline.replace(/STACK_VERSION: [0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?/, `STACK_VERSION: ${cleanVersion}-SNAPSHOT`),
     'utf8'
   )
 }
diff --git a/.ci/make.sh b/.ci/make.sh
index 7f890cb2b..c3d9f5b4f 100755
--- a/.ci/make.sh
+++ b/.ci/make.sh
@@ -144,19 +144,35 @@ docker build \
 
 echo -e "\033[34;1mINFO: running $product container\033[0m"
 
-docker run \
-  --volume "$repo:/usr/src/elasticsearch-js" \
-  --volume /usr/src/elasticsearch-js/node_modules \
-  -u "$(id -u):$(id -g)" \
-  --env "WORKFLOW=$WORKFLOW" \
-  --name make-elasticsearch-js \
-  --rm \
-  $product \
-  /bin/bash -c "cd /usr/src && \
-    git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
-    mkdir -p /usr/src/elastic-client-generator-js/output && \
-    cd /usr/src/elasticsearch-js && \
-    node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
+if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
+  echo -e "\033[34;1mINFO: Running in local mode"
+  docker run \
+    -u "$(id -u):$(id -g)" \
+    --volume "$repo:/usr/src/elasticsearch-js" \
+    --volume /usr/src/elasticsearch-js/node_modules \
+    --volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
+    --env "WORKFLOW=$WORKFLOW" \
+    --name make-elasticsearch-js \
+    --rm \
+    $product \
+    /bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
+      node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
+else
+  echo -e "\033[34;1mINFO: Running in CI mode"
+  docker run \
+    --volume "$repo:/usr/src/elasticsearch-js" \
+    --volume /usr/src/elasticsearch-js/node_modules \
+    -u "$(id -u):$(id -g)" \
+    --env "WORKFLOW=$WORKFLOW" \
+    --name make-elasticsearch-js \
+    --rm \
+    $product \
+    /bin/bash -c "cd /usr/src && \
+      git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
+      mkdir -p /usr/src/elastic-client-generator-js/output && \
+      cd /usr/src/elasticsearch-js && \
+      node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
+fi
 
 # ------------------------------------------------------- #
 # Post Command tasks & checks
diff --git a/.dockerignore b/.dockerignore
index 54eb2a95a..e34f9ff27 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -3,3 +3,5 @@ npm-debug.log
 test/benchmarks
 elasticsearch
 .git
+lib
+junit-output
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 906f1474b..56d4f328a 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -1,16 +1,27 @@
+---
 name: Backport
 on:
-  pull_request:
+  pull_request_target:
     types:
       - closed
      - labeled
 
 jobs:
   backport:
-    runs-on: ubuntu-latest
     name: Backport
+    runs-on: ubuntu-latest
+    # Only react to merged PRs for security reasons.
+    # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target.
+    if: >
+      github.event.pull_request.merged
+      && (
+        github.event.action == 'closed'
+        || (
+          github.event.action == 'labeled'
+          && contains(github.event.label.name, 'backport')
+        )
+      )
     steps:
-      - name: Backport
-        uses: tibdex/backport@7005ef85c4562bc23b0e9b4a9940d5922f439750
+      - uses: tibdex/backport@9565281eda0731b1d20c4025c43339fb0a23812e # v2.0.4
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml
index ca00cb372..f8a4165c5 100644
--- a/.github/workflows/nodejs.yml
+++ b/.github/workflows/nodejs.yml
@@ -1,21 +1,40 @@
 ---
 name: Node CI
 
-on: [push, pull_request]
+on:
+  pull_request: {}
 
 jobs:
+  paths-filter:
+    name: Detect files changed
+    runs-on: ubuntu-latest
+    outputs:
+      src-only: '${{ steps.changes.outputs.src-only }}'
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dorny/paths-filter/@v2.11.1
+        id: changes
+        with:
+          filters: |
+            src-only:
+              - '!(**/*.{md,asciidoc,txt}|*.{md,asciidoc,txt}|{docs,.ci,.buildkite,scripts}/**/*|catalog-info.yaml)'
+              - '.github/workflows/**'
+
   test:
     name: Test
     runs-on: ${{ matrix.os }}
+    needs: paths-filter
+    # only run if code relevant to unit tests was changed
+    if: needs.paths-filter.outputs.src-only == 'true'
 
     strategy:
       fail-fast: false
       matrix:
-        node-version: [14.x, 16.x, 18.x, 20.x]
+        node-version: [18.x, 20.x]
         os: [ubuntu-latest, windows-latest, macOS-latest]
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Use Node.js ${{ matrix.node-version }}
         uses: actions/setup-node@v3
diff --git a/.github/workflows/npm-publish.yml b/.github/workflows/npm-publish.yml
new file mode 100644
index 000000000..73a7d36c2
--- /dev/null
+++ b/.github/workflows/npm-publish.yml
@@ -0,0 +1,27 @@
+name: Publish Package to npm
+on:
+  workflow_dispatch:
+    inputs:
+      branch:
+        description: 'Git branch to build and publish'
+        required: true
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.inputs.branch }}
+      - uses: actions/setup-node@v3
+        with:
+          node-version: '20.x'
+          registry-url: 'https://registry.npmjs.org'
+      - run: npm install -g npm
+      - run: npm install
+      - run: npm test
+      - run: npm publish --provenance --access public
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
new file mode 100644
index 000000000..3970f1d8d
--- /dev/null
+++ b/.github/workflows/stale.yml
@@ -0,0 +1,21 @@
+---
+name: 'Close stale issues and PRs'
+on:
+  schedule:
+    - cron: '30 1 * * *'
+
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@v8
+        with:
+          stale-issue-label: stale
+          stale-pr-label: stale
+          days-before-stale: 90
+          days-before-close: 14
+          exempt-issue-labels: 'good first issue'
+          close-issue-label: closed-stale
+          close-pr-label: closed-stale
+          stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
+          stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
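The `.ci/make.mjs` change earlier in this diff replaces the globally loaded `argv` with a manually configured minimist call. A minimal sketch of the parsing pitfall it works around — the argument values here are hypothetical, but the coercion of numeric-looking arguments is minimist's documented default behavior:

[source,js]
----
import minimist from 'minimist'

// With default options, minimist coerces '8.10' to the number 8.1,
// silently dropping the trailing zero from the version argument.
const loose = minimist(['--task', 'bump', '8.10'])
console.log(loose._[0]) // 8.1

// Declaring `_` (positionals) and `task` as strings preserves '8.10',
// which is what the updated .ci/make.mjs relies on.
const strict = minimist(['--task', 'bump', '8.10'], { string: ['_', 'task'] })
console.log(strict._[0]) // '8.10'
----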
diff --git a/README.md b/README.md
index 37e78a387..5d34cfa98 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,11 @@
 [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/)
 [![Build Status](https://badge.buildkite.com/15e4246eb268ea78f6e10aa90bce38c1abb0a4489e79f5a0ac.svg)](https://buildkite.com/elastic/elasticsearch-javascript-client-integration-tests/builds?branch=main) [![Node CI](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml/badge.svg)](https://github.com/elastic/elasticsearch-js/actions/workflows/nodejs.yml) [![codecov](https://codecov.io/gh/elastic/elasticsearch-js/branch/master/graph/badge.svg)](https://codecov.io/gh/elastic/elasticsearch-js) [![NPM downloads](https://img.shields.io/npm/dm/@elastic/elasticsearch.svg?style=flat)](https://www.npmjs.com/package/@elastic/elasticsearch)
 
+**[Download the latest version of Elasticsearch](https://www.elastic.co/downloads/elasticsearch)**
+or
+**[sign-up](https://cloud.elastic.co/registration?elektra=en-ess-sign-up-page)**
+**for a free trial of Elastic Cloud**.
+
 The official Node.js client for Elasticsearch.
 
 ## Installation
@@ -28,7 +33,7 @@ of the getting started documentation.
 
 ### Node.js support
 
-NOTE: The minimum supported version of Node.js is `v14`.
+NOTE: The minimum supported version of Node.js is `v18`.
 
 The client versioning follows the Elastic Stack versioning, this means that
 major, minor, and patch releases are done following a precise schedule that
@@ -53,6 +58,7 @@ of `^7.10.0`).
 | `10.x`           | `April 2021`     | `7.12` (mid 2021)   |
 | `12.x`           | `April 2022`     | `8.2` (early 2022)  |
 | `14.x`           | `April 2023`     | `8.8` (early 2023)  |
+| `16.x`           | `September 2023` | `8.11` (late 2023)  |
 
 ### Compatibility
diff --git a/catalog-info.yaml b/catalog-info.yaml
index b8bbd36ff..80c943cd8 100644
--- a/catalog-info.yaml
+++ b/catalog-info.yaml
@@ -42,9 +42,9 @@ spec:
       main_semi_daily:
         branch: 'main'
         cronline: '0 */12 * * *'
-      8_9_semi_daily:
-        branch: '8.9'
+      8_12_semi_daily:
+        branch: '8.12'
         cronline: '0 */12 * * *'
-      8_8_daily:
-        branch: '8.8'
+      8_11_daily:
+        branch: '8.11'
         cronline: '@daily'
diff --git a/docs/advanced-config.asciidoc b/docs/advanced-config.asciidoc
index 638aeada4..b3c9388a4 100644
--- a/docs/advanced-config.asciidoc
+++ b/docs/advanced-config.asciidoc
@@ -91,6 +91,95 @@ const client = new Client({
 })
 ----
 
+[discrete]
+[[redaction]]
+==== Redaction of potentially sensitive data
+
+When the client raises an `Error` that originated at the HTTP layer, like a `ConnectionError` or `TimeoutError`, a `meta` object is often attached to the error object that includes metadata useful for debugging, like request and response information. Because this can include potentially sensitive data, like authentication secrets in an `Authorization` header, the client takes measures to redact common sources of sensitive data when this metadata is attached and serialized.
+
+If your configuration requires extra headers or other configurations that may include sensitive data, you may want to adjust these settings to account for that.
+
+By default, the `redaction` option is set to `{ type: 'replace' }`, which recursively searches for sensitive key names, case insensitive, and replaces their values with the string `[redacted]`.
+
+[source,js]
+----
+const { Client } = require('@elastic/elasticsearch')
+
+const client = new Client({
+  cloud: { id: '' },
+  auth: { apiKey: 'base64EncodedKey' },
+})
+
+try {
+  await client.indices.create({ index: 'my_index' })
+} catch (err) {
+  console.log(err.meta.meta.request.options.headers.authorization) // prints "[redacted]"
+}
+----
+
+If you would like to redact additional properties, you can include additional key names to search and replace:
+
+[source,js]
+----
+const { Client } = require('@elastic/elasticsearch')
+
+const client = new Client({
+  cloud: { id: '' },
+  auth: { apiKey: 'base64EncodedKey' },
+  headers: { 'X-My-Secret-Password': "shhh it's a secret!" },
+  redaction: {
+    type: "replace",
+    additionalKeys: ["x-my-secret-password"]
+  }
+})
+
+try {
+  await client.indices.create({ index: 'my_index' })
+} catch (err) {
+  console.log(err.meta.meta.request.options.headers['X-My-Secret-Password']) // prints "[redacted]"
+}
+----
+
+Alternatively, if you know you're not going to use the metadata at all, setting the redaction type to `remove` will strip all optional sources of potentially sensitive data entirely, and replace required properties with `null`.
+
+[source,js]
+----
+const { Client } = require('@elastic/elasticsearch')
+
+const client = new Client({
+  cloud: { id: '' },
+  auth: { apiKey: 'base64EncodedKey' },
+  redaction: { type: "remove" }
+})
+
+try {
+  await client.indices.create({ index: 'my_index' })
+} catch (err) {
+  console.log(err.meta.meta.request.options.headers) // undefined
+}
+----
+
+Finally, if you prefer to turn off redaction altogether, perhaps while debugging on a local developer environment, you can set the redaction type to `off`. This will revert the client to pre-8.11.0 behavior, where basic redaction is only performed during common serialization methods like `console.log` and `JSON.stringify`.
+
+WARNING: Setting `redaction.type` to `off` is not recommended in production environments.
+
+[source,js]
+----
+const { Client } = require('@elastic/elasticsearch')
+
+const client = new Client({
+  cloud: { id: '' },
+  auth: { apiKey: 'base64EncodedKey' },
+  redaction: { type: "off" }
+})
+
+try {
+  await client.indices.create({ index: 'my_index' })
+} catch (err) {
+  console.log(err.meta.meta.request.options.headers.authorization) // the actual header value will be logged
+}
+----
+
 [discrete]
 ==== Migrate to v8
diff --git a/docs/changelog.asciidoc b/docs/changelog.asciidoc
index b82c397da..4dbf11907 100644
--- a/docs/changelog.asciidoc
+++ b/docs/changelog.asciidoc
@@ -1,6 +1,37 @@
 [[changelog-client]]
 == Release notes
 
+[discrete]
+=== 8.12.0
+
+[discrete]
+=== Features
+
+[discrete]
+===== Support for Elasticsearch `v8.12.0`
+
+You can find all the API changes
+https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
+
+[discrete]
+=== 8.11.0
+
+[discrete]
+=== Features
+
+[discrete]
+===== Support for Elasticsearch `v8.11.0`
+
+You can find all the API changes
+https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.0.html[here].
+
+[discrete]
+===== Enhanced support for redacting potentially sensitive data https://github.com/elastic/elasticsearch-js/pull/2095[#2095]
+
+`@elastic/transport` https://github.com/elastic/elastic-transport-js/releases/tag/v8.4.0[version 8.4.0] introduces enhanced measures for ensuring that request metadata attached to some `Error` objects is redacted.
+This functionality is primarily intended to address custom logging solutions that don't use common serialization methods like `JSON.stringify`, `console.log`, or `util.inspect`, which were already accounted for.
+
+See <<redaction>> for more information.
+
 [discrete]
 === 8.10.0
 
@@ -342,6 +373,9 @@ The client API leaks HTTP-related notions in many places, and removing them woul
 This could be a rather big breaking change, so a double solution could be used during the 8.x lifecycle. (accepting body keys without them being wrapped in the body as well as the current solution).
 
+To convert code from 7.x, you need to remove the `body` parameter from all endpoint requests.
+For instance, this is an example for the `search` endpoint:
+
 [source,js]
 ----
 // from
@@ -380,6 +414,12 @@ If you weren't extending the internals of the client, this won't be a breaking c
 The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
 The client will expose a new request-specific option to still get the full response details.
 
+The new behaviour returns the `body` value directly as the response.
+If you want to have the 7.x response format, you need to add `meta: true` to the request.
+This will return all the HTTP meta information, including the `body`.
+
+For instance, this is an example for the `search` endpoint:
+
 [source,js]
 ----
 // from
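To make the `body`-removal migration note in the changelog above concrete, here is a minimal sketch of the same request in both styles; the index name and query are hypothetical:

[source,js]
----
// 7.x style: the request payload is wrapped in a `body` key
const res7 = await client.search({
  index: 'my-index',
  body: {
    query: { match_all: {} }
  }
})

// 8.x style: the same request with the `body` wrapper removed.
// Pass `{ meta: true }` as a second argument to get back the
// 7.x-style response with the full HTTP metadata attached.
const res8 = await client.search({
  index: 'my-index',
  query: { match_all: {} }
})
----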
diff --git a/docs/connecting.asciidoc b/docs/connecting.asciidoc
index 239eea79e..15007ceb3 100644
--- a/docs/connecting.asciidoc
+++ b/docs/connecting.asciidoc
@@ -11,6 +11,8 @@ This page contains the information you need to connect and use the Client with
 * <>
 * <>
 * <>
+* <<keep-alive>>
+* <<close-connections>>
 * <>
 
 [[authentication]]
@@ -539,11 +541,17 @@ If you need to pass through an http(s) proxy for connecting to {es}, the client
 out of the box offers a handy configuration for helping you with it. Under the
 hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.
 
+IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
+To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.
+
 [source,js]
 ----
+import { HttpConnection } from '@elastic/transport'
+
 const client = new Client({
   node: 'http://localhost:9200',
-  proxy: 'http://localhost:8080'
+  proxy: 'http://localhost:8080',
+  Connection: HttpConnection,
 })
 ----
 
@@ -553,11 +561,12 @@ Basic authentication is supported as well:
 ----
 const client = new Client({
   node: 'http://localhost:9200',
-  proxy: 'http:user:pwd@//localhost:8080'
+  proxy: 'http:user:pwd@//localhost:8080',
+  Connection: HttpConnection,
 })
 ----
 
-If you are connecting through a not http(s) proxy, such as a `socks5` or `pac`,
+If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`,
 you can use the `agent` option to configure it.
 
 [source,js]
@@ -567,7 +576,8 @@ const client = new Client({
   node: 'http://localhost:9200',
   agent () {
     return new SocksProxyAgent('socks://127.0.0.1:1080')
-  }
+  },
+  Connection: HttpConnection,
 })
 ----
 
@@ -651,6 +661,51 @@ a|* `name` - `string`
 * `headers` - `object`, the response status code
 |===
 
+[[keep-alive]]
+[discrete]
+=== Keep-alive connections
+
+By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request.
+If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes.
+If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.
+
+If you need to disable keep-alive connections, you can override the HTTP agent with your preferred https://nodejs.org/api/http.html#http_new_agent_options[HTTP agent options]:
+
+[source,js]
+----
+const client = new Client({
+  node: 'http://localhost:9200',
+  // the function takes as parameter the option
+  // object passed to the Connection constructor
+  agent: (opts) => new CustomAgent()
+})
+----
+
+Or you can disable the HTTP agent entirely:
+
+[source,js]
+----
+const client = new Client({
+  node: 'http://localhost:9200',
+  // Disable agent and keep-alive
+  agent: false
+})
+----
+
+[discrete]
+[[close-connections]]
+=== Closing a client's connections
+
+If you would like to close all open connections being managed by an instance of the client, use the `close()` function:
+
+[source,js]
+----
+const client = new Client({
+  node: 'http://localhost:9200'
+});
+client.close();
+----
+
 [discrete]
 [[product-check]]
 === Automatic product check
diff --git a/docs/doc_examples/36b86b97feedcf5632824eefc251d6ed.asciidoc b/docs/doc_examples/36b86b97feedcf5632824eefc251d6ed.asciidoc
new file mode 100644
index 000000000..408ce2f71
--- /dev/null
+++ b/docs/doc_examples/36b86b97feedcf5632824eefc251d6ed.asciidoc
@@ -0,0 +1,12 @@
+[source,js]
+----
+const response = await client.search({
+  index: 'books',
+  query: {
+    match: {
+      name: 'brave'
+    }
+  }
+})
+console.log(response)
+----
diff --git a/docs/doc_examples/8575c966b004fb124c7afd6bb5827b50.asciidoc b/docs/doc_examples/8575c966b004fb124c7afd6bb5827b50.asciidoc
new file mode 100644
index 000000000..d99bd96dc
--- /dev/null
+++ b/docs/doc_examples/8575c966b004fb124c7afd6bb5827b50.asciidoc
@@ -0,0 +1,13 @@
+[source,js]
+----
+const response = await client.index({
+  index: 'books',
+  document: {
+    name: 'Snow Crash',
+    author: 'Neal Stephenson',
+    release_date: '1992-06-01',
+    page_count: 470,
+  }
+})
+console.log(response)
+----
diff --git a/docs/doc_examples/bcc75fc01b45e482638c65b8fbdf09fa.asciidoc b/docs/doc_examples/bcc75fc01b45e482638c65b8fbdf09fa.asciidoc
new file mode 100644
index 000000000..1708d0956
--- /dev/null
+++ b/docs/doc_examples/bcc75fc01b45e482638c65b8fbdf09fa.asciidoc
@@ -0,0 +1,7 @@
+[source,js]
+----
+const response = await client.search({
+  index: 'books'
+})
+console.log(response)
+----
diff --git a/docs/doc_examples/d04f0c8c44e8b4fb55f2e7d9d05977e7.asciidoc b/docs/doc_examples/d04f0c8c44e8b4fb55f2e7d9d05977e7.asciidoc
new file mode 100644
index 000000000..e5ce437b2
--- /dev/null
+++ b/docs/doc_examples/d04f0c8c44e8b4fb55f2e7d9d05977e7.asciidoc
@@ -0,0 +1,43 @@
+[source,js]
+----
+const response = await client.bulk({
+  operations: [
+    { index: { _index: 'books' } },
+    {
+      name: 'Revelation Space',
+      author: 'Alastair Reynolds',
+      release_date: '2000-03-15',
+      page_count: 585,
+    },
+    { index: { _index: 'books' } },
+    {
+      name: '1984',
+      author: 'George Orwell',
+      release_date: '1985-06-01',
+      page_count: 328,
+    },
+    { index: { _index: 'books' } },
+    {
+      name: 'Fahrenheit 451',
+      author: 'Ray Bradbury',
+      release_date: '1953-10-15',
+      page_count: 227,
+    },
+    { index: { _index: 'books' } },
+    {
+      name: 'Brave New World',
+      author: 'Aldous Huxley',
+      release_date: '1932-06-01',
+      page_count: 268,
+    },
+    { index: { _index: 'books' } },
+    {
+      name: 'The Handmaids Tale',
+      author: 'Margaret Atwood',
+      release_date: '1985-06-01',
+      page_count: 311,
+    }
+  ]
+})
+console.log(response)
+----
diff --git a/docs/installation.asciidoc b/docs/installation.asciidoc
index b04a1a1cd..4fe1f78ab 100644
--- a/docs/installation.asciidoc
+++ b/docs/installation.asciidoc
@@ -24,7 +24,7 @@ To learn more about the supported major versions, please refer to the
 [[nodejs-support]]
 === Node.js support
 
-NOTE: The minimum supported version of Node.js is `v14`.
+NOTE: The minimum supported version of Node.js is `v18`.
 
 The client versioning follows the {stack} versioning, this means that
 major, minor, and patch releases are done following a precise schedule that
@@ -64,6 +64,10 @@ of `^7.10.0`).
 |`14.x`
 |April 2023
 |`8.8` (early 2023)
+
+|`16.x`
+|September 2023
+|`8.11` (late 2023)
 |===
 
 [discrete]
diff --git a/docs/reference.asciidoc b/docs/reference.asciidoc
index 56d17f6db..9e5291cbb 100644
--- a/docs/reference.asciidoc
+++ b/docs/reference.asciidoc
@@ -4942,14 +4942,14 @@ Retrieves Logstash Pipelines used by Central Management
 
 {ref}/logstash-api-get-pipeline.html[Endpoint documentation]
 [source,ts]
 ----
-client.logstash.getPipeline({ id })
+client.logstash.getPipeline({ ... })
 ----
 
 [discrete]
 ==== Arguments
 
 * *Request (object):*
-** *`id` (string | string[])*: List of pipeline identifiers.
+** *`id` (Optional, string | string[])*: List of pipeline identifiers.
 
 [discrete]
 ==== put_pipeline
@@ -6302,6 +6302,13 @@ ELSER the config is not required.
 ** *`model_size_bytes` (Optional, number)*: The estimated memory usage in bytes to keep the trained model in memory.
 This property is supported only if defer_definition_decompression is true
 or the model definition is not supplied.
+** *`platform_architecture` (Optional, string)*: The platform architecture (if applicable) of the trained model. If the model
+only works on one platform, because it is heavily optimized for a particular
+processor architecture and OS combination, then this field specifies which.
+The format of the string must match the platform identifiers used by Elasticsearch,
+so one of, `linux-x86_64`, `linux-aarch64`, `darwin-x86_64`, `darwin-aarch64`,
+or `windows-x86_64`. For portable models (those that work independent of processor
+architecture or OS features), leave this field unset.
 ** *`tags` (Optional, string[])*: An array of tags to organize the model.
 ** *`defer_definition_decompression` (Optional, boolean)*: If set to `true` and a `compressed_definition` is provided, the request defers definition decompression and skips relevant validations.
@@ -6363,6 +6370,7 @@ client.ml.putTrainedModelVocabulary({ model_id, vocabulary })
 ** *`model_id` (string)*: The unique identifier of the trained model.
 ** *`vocabulary` (string[])*: The model vocabulary, which must not be empty.
 ** *`merges` (Optional, string[])*: The optional model merges if required by the tokenizer.
+** *`scores` (Optional, number[])*: The optional vocabulary value scores if required by the tokenizer.
 
 [discrete]
 ==== reset_job
@@ -6483,6 +6491,7 @@ client.ml.startTrainedModelDeployment({ model_id })
 ** *`cache_size` (Optional, number | string)*: The inference cache size (in memory outside the JVM heap) per node for the model.
 The default value is the same size as the `model_size_bytes`. To disable the cache,
 `0b` can be provided.
+** *`deployment_id` (Optional, string)*: A unique identifier for the deployment of the model.
 ** *`number_of_allocations` (Optional, number)*: The number of model allocations on each node where the model is deployed.
 All allocations on a node share the same copy of the model in memory but use
 a separate set of threads to evaluate the model.
@@ -9057,6 +9066,8 @@ client.transform.deleteTransform({ transform_id })
 ** *`transform_id` (string)*: Identifier for the transform.
 ** *`force` (Optional, boolean)*: If this value is false, the transform must be stopped before it can be deleted. If true, the transform is deleted regardless of its current state.
+** *`delete_dest_index` (Optional, boolean)*: If this value is true, the destination index is deleted together with the transform. If false, the destination
+index will not be deleted.
 ** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.
 
 [discrete]
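The `logstash.getPipeline` change documented above (and implemented in `src/api/api/logstash.ts` below) makes the `id` parameter optional. A short sketch of what the relaxed signature permits; the pipeline id is hypothetical:

[source,js]
----
// Omitting `id` now retrieves every pipeline used by Central Management
const allPipelines = await client.logstash.getPipeline()

// A specific pipeline (or a comma-separated list of ids) can still be requested
const onePipeline = await client.logstash.getPipeline({ id: 'my-pipeline' })
----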
diff --git a/package.json b/package.json
index 995ee6243..7e3d637b0 100644
--- a/package.json
+++ b/package.json
@@ -1,10 +1,11 @@
 {
   "name": "@elastic/elasticsearch",
-  "version": "8.10.1",
-  "versionCanary": "8.10.1-canary.1",
+  "version": "8.12.0",
+  "versionCanary": "8.12.0-canary.0",
   "description": "The official Elasticsearch client for Node.js",
   "main": "index.js",
   "types": "index.d.ts",
+  "type": "commonjs",
   "scripts": {
     "test": "npm run build && npm run lint && tap test/unit/{*,**/*}.test.ts",
     "test:unit": "npm run build && tap test/unit/{*,**/*}.test.ts",
@@ -46,7 +47,7 @@
   },
   "homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
   "engines": {
-    "node": ">=14"
+    "node": ">=18"
   },
   "devDependencies": {
     "@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
@@ -82,7 +83,7 @@
     "zx": "^7.2.2"
   },
   "dependencies": {
-    "@elastic/transport": "^8.3.4",
+    "@elastic/transport": "^8.4.0",
     "tslib": "^2.4.0"
   },
   "tap": {
@@ -92,4 +93,4 @@
     "coverage": false,
     "check-coverage": false
   }
-}
+}
\ No newline at end of file
diff --git a/src/api/api/logstash.ts b/src/api/api/logstash.ts
index 0b85093de..9367e308a 100644
--- a/src/api/api/logstash.ts
+++ b/src/api/api/logstash.ts
@@ -73,14 +73,15 @@ export default class Logstash {
    * Retrieves Logstash Pipelines used by Central Management
    * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/logstash-api-get-pipeline.html | Elasticsearch API documentation}
    */
-  async getPipeline (this: That, params: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.LogstashGetPipelineResponse>
-  async getPipeline (this: That, params: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LogstashGetPipelineResponse, unknown>>
-  async getPipeline (this: That, params: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptions): Promise<T.LogstashGetPipelineResponse>
-  async getPipeline (this: That, params: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptions): Promise<any> {
+  async getPipeline (this: That, params?: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.LogstashGetPipelineResponse>
+  async getPipeline (this: That, params?: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LogstashGetPipelineResponse, unknown>>
+  async getPipeline (this: That, params?: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptions): Promise<T.LogstashGetPipelineResponse>
+  async getPipeline (this: That, params?: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptions): Promise<any> {
     const acceptedPath: string[] = ['id']
     const querystring: Record<string, any> = {}
     const body = undefined
 
+    params = params ?? {}
     for (const key in params) {
       if (acceptedPath.includes(key)) {
         continue
diff --git a/src/api/api/ml.ts b/src/api/api/ml.ts
index 27ccefdb9..e0f57a2ad 100644
--- a/src/api/api/ml.ts
+++ b/src/api/api/ml.ts
@@ -1821,7 +1821,7 @@ export default class Ml {
   async putTrainedModel (this: That, params: T.MlPutTrainedModelRequest | TB.MlPutTrainedModelRequest, options?: TransportRequestOptions): Promise<any> {
     const acceptedPath: string[] = ['model_id']
-    const acceptedBody: string[] = ['compressed_definition', 'definition', 'description', 'inference_config', 'input', 'metadata', 'model_type', 'model_size_bytes', 'tags']
+    const acceptedBody: string[] = ['compressed_definition', 'definition', 'description', 'inference_config', 'input', 'metadata', 'model_type', 'model_size_bytes', 'platform_architecture', 'tags']
     const querystring: Record<string, any> = {}
     // @ts-expect-error
     const userBody: any = params?.body
@@ -1923,7 +1923,7 @@ export default class Ml {
   async putTrainedModelVocabulary (this: That, params: T.MlPutTrainedModelVocabularyRequest | TB.MlPutTrainedModelVocabularyRequest, options?: TransportRequestOptions): Promise<any> {
     const acceptedPath: string[] = ['model_id']
-    const acceptedBody: string[] = ['vocabulary', 'merges']
+    const acceptedBody: string[] = ['vocabulary', 'merges', 'scores']
     const querystring: Record<string, any> = {}
     // @ts-expect-error
     const userBody: any = params?.body
diff --git a/src/api/types.ts b/src/api/types.ts
index 7748aa7e7..707e1cc13 100644
--- a/src/api/types.ts
+++ b/src/api/types.ts
@@ -707,7 +707,7 @@ export interface MsearchMultiSearchItem<TDocument = unknown> extends SearchRespo
   status?: integer
 }
 
-export interface MsearchMultiSearchResult<TDocument = unknown> {
+export interface MsearchMultiSearchResult<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> {
   took: long
   responses: MsearchResponseItem<TDocument>[]
 }
@@ -780,7 +780,7 @@ export interface MsearchRequest extends RequestBase {
 
 export type MsearchRequestItem = MsearchMultisearchHeader | MsearchMultisearchBody
 
-export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>
 
 export type MsearchResponseItem<TDocument = unknown> = MsearchMultiSearchItem<TDocument> | ErrorResponseBase
 
@@ -796,7 +796,7 @@ export interface MsearchTemplateRequest extends RequestBase {
 
 export type MsearchTemplateRequestItem = MsearchMultisearchHeader | MsearchTemplateTemplateConfig
 
-export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>
 
 export interface MsearchTemplateTemplateConfig {
   explain?: boolean
@@ -2009,6 +2009,9 @@ export interface ClusterStatistics {
   skipped: integer
   successful: integer
   total: integer
+  running: integer
+  partial: integer
+  failed: integer
   details?: Record<ClusterAlias, ClusterDetails>
 }
@@ -2283,6 +2286,8 @@ export type Level = 'cluster' | 'indices' | 'shards'
 
 export type LifecycleOperationMode = 'RUNNING' | 'STOPPING' | 'STOPPED'
 
+export type ManagedBy = 'Index Lifecycle Management' | 'Data stream lifecycle' | 'Unmanaged'
+
 export type MapboxVectorTiles = ArrayBuffer
 
 export interface MergesStats {
@@ -4930,7 +4935,7 @@ export interface MappingFieldNamesField {
   enabled: boolean
 }
 
-export type MappingFieldType = 'none' | 'geo_point' | 'geo_shape' | 'ip' | 'binary' | 'keyword' | 'text' | 'search_as_you_type' | 'date' | 'date_nanos' | 'boolean' | 'completion' | 'nested' | 'object' | 'murmur3' | 'token_count' | 'percolator' | 'integer' | 'long' | 'short' | 'byte' | 'float' | 'half_float' | 'scaled_float' | 'double' | 'integer_range' | 'float_range' | 'long_range' | 'double_range' | 'date_range' | 'ip_range' | 'alias' | 'join' | 'rank_feature' | 'rank_features' | 'flattened' | 'shape' | 'histogram' | 'constant_keyword' | 'aggregate_metric_double' | 'dense_vector' | 'match_only_text'
+export type MappingFieldType = 'none' | 'geo_point' | 'geo_shape' | 'ip' | 'binary' | 'keyword' | 'text' | 'search_as_you_type' | 'date' | 'date_nanos' | 'boolean' | 'completion' | 'nested' | 'object' | 'murmur3' | 'token_count' | 'percolator' | 'integer' | 'long' | 'short' | 'byte' | 'float' | 'half_float' | 'scaled_float' | 'double' | 'integer_range' | 'float_range' | 'long_range' | 'double_range' | 'date_range' | 'ip_range' | 'alias' | 'join' | 'rank_feature' | 'rank_features' | 'flattened' | 'shape' | 'histogram' | 'constant_keyword' | 'aggregate_metric_double' | 'dense_vector' | 'sparse_vector' | 'match_only_text'
 
 export interface MappingFlattenedProperty extends MappingPropertyBase {
   boost?: double
@@ -9520,6 +9525,8 @@ export interface IndicesDataStream {
   generation: integer
   hidden: boolean
   ilm_policy?: Name
+  next_generation_managed_by: ManagedBy
+  prefer_ilm: boolean
   indices: IndicesDataStreamIndex[]
   lifecycle?: IndicesDataStreamLifecycleWithRollover
   name: DataStreamName
@@ -9533,6 +9540,9 @@ export interface IndicesDataStream {
 export interface IndicesDataStreamIndex {
   index_name: IndexName
   index_uuid: Uuid
+  ilm_policy?: Name
+  managed_by: ManagedBy
+  prefer_ilm: boolean
 }
 
 export interface IndicesDataStreamLifecycle {
@@ -11809,7 +11819,7 @@ export interface LogstashDeletePipelineRequest extends RequestBase {
 
 export type LogstashDeletePipelineResponse = boolean
 
 export interface LogstashGetPipelineRequest extends RequestBase {
-  id: Ids
+  id?: Ids
 }
 
 export type LogstashGetPipelineResponse = Record<Id, LogstashPipeline>
@@ -13977,6 +13987,7 @@ export interface MlPutTrainedModelRequest extends RequestBase {
   metadata?: any
   model_type?: MlTrainedModelType
   model_size_bytes?: long
+  platform_architecture?: string
   tags?: string[]
 }
@@ -14040,6 +14051,7 @@ export interface MlPutTrainedModelVocabularyRequest extends RequestBase {
   model_id: Id
   vocabulary: string[]
   merges?: string[]
+  scores?: double[]
 }
 
 export type MlPutTrainedModelVocabularyResponse = AcknowledgedResponseBase
@@ -14094,6 +14106,7 @@ export interface MlStartDatafeedResponse {
 
 export interface MlStartTrainedModelDeploymentRequest extends RequestBase {
   model_id: Id
   cache_size?: ByteSize
+  deployment_id?: string
   number_of_allocations?: integer
   priority?: MlTrainingPriority
   queue_capacity?: integer
@@ -17368,6 +17381,7 @@ export interface TransformTimeSync {
 
 export interface TransformDeleteTransformRequest extends RequestBase {
   transform_id: Id
   force?: boolean
+  delete_dest_index?: boolean
   timeout?: Duration
 }
diff --git a/src/api/typesWithBodyKey.ts b/src/api/typesWithBodyKey.ts
index 8b077f0f6..dbbe3ea1a 100644
--- a/src/api/typesWithBodyKey.ts
+++ b/src/api/typesWithBodyKey.ts
@@ -735,7 +735,7 @@ export interface MsearchMultiSearchItem<TDocument = unknown> extends SearchRespo
   status?: integer
 }
 
-export interface MsearchMultiSearchResult<TDocument = unknown> {
+export interface MsearchMultiSearchResult<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> {
   took: long
   responses: MsearchResponseItem<TDocument>[]
 }
@@ -809,7 +809,7 @@ export interface MsearchRequest extends RequestBase {
 
 export type MsearchRequestItem = MsearchMultisearchHeader | MsearchMultisearchBody
 
-export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>
 
 export type MsearchResponseItem<TDocument = unknown> = MsearchMultiSearchItem<TDocument> | ErrorResponseBase
 
@@ -826,7 +826,7 @@ export interface MsearchTemplateRequest extends RequestBase {
 
 export type MsearchTemplateRequestItem = MsearchMultisearchHeader | MsearchTemplateTemplateConfig
 
-export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>
 
 export interface MsearchTemplateTemplateConfig {
   explain?: boolean
@@ -2082,6 +2082,9 @@ export interface ClusterStatistics {
   skipped: integer
   successful: integer
   total: integer
+  running: integer
+  partial: integer
+  failed: integer
   details?: Record<ClusterAlias, ClusterDetails>
 }
@@ -2356,6 +2359,8 @@ export type Level = 'cluster' | 'indices' | 'shards'
 
 export type LifecycleOperationMode = 'RUNNING' | 'STOPPING' | 'STOPPED'
 
+export type ManagedBy = 'Index Lifecycle Management' | 'Data stream lifecycle' | 'Unmanaged'
+
 export type MapboxVectorTiles = ArrayBuffer
 
 export interface MergesStats {
@@ -5003,7 +5008,7 @@ export interface MappingFieldNamesField {
   enabled: boolean
 }
 
-export type MappingFieldType = 'none' | 'geo_point' | 'geo_shape' | 'ip' | 'binary' | 'keyword' | 'text' | 'search_as_you_type' | 'date' | 'date_nanos' | 'boolean' | 'completion' | 'nested' | 'object' | 'murmur3' | 'token_count' | 'percolator' | 'integer' | 'long' | 'short' | 'byte' | 'float' | 'half_float' | 'scaled_float' | 'double' | 'integer_range' | 'float_range' | 'long_range' | 'double_range' | 'date_range' | 'ip_range' | 'alias' | 'join' | 'rank_feature' | 'rank_features' | 'flattened' | 'shape' | 'histogram' | 'constant_keyword' | 'aggregate_metric_double' | 'dense_vector' | 'match_only_text'
+export type MappingFieldType = 'none' | 'geo_point' | 'geo_shape' | 'ip' | 'binary' | 'keyword' | 'text' | 'search_as_you_type' | 'date' | 'date_nanos' | 'boolean' | 'completion' | 'nested' | 'object' | 'murmur3' | 'token_count' | 'percolator' | 'integer' | 'long' | 'short' | 'byte' | 'float' | 'half_float' | 'scaled_float' | 'double' | 'integer_range' | 'float_range' | 'long_range' | 'double_range' | 'date_range' | 'ip_range' | 'alias' | 'join' | 'rank_feature' | 'rank_features' | 'flattened' | 'shape' | 'histogram' | 'constant_keyword' | 'aggregate_metric_double' | 'dense_vector' | 'sparse_vector' | 'match_only_text'
 
 export interface MappingFlattenedProperty extends MappingPropertyBase {
   boost?: double
@@ -9643,6 +9648,8 @@ export interface IndicesDataStream {
   generation: integer
   hidden: boolean
   ilm_policy?: Name
+  next_generation_managed_by: ManagedBy
+  prefer_ilm: boolean
   indices: IndicesDataStreamIndex[]
   lifecycle?: IndicesDataStreamLifecycleWithRollover
   name: DataStreamName
@@ -9656,6 +9663,9 @@ export interface IndicesDataStream {
 export interface IndicesDataStreamIndex {
   index_name: IndexName
   index_uuid: Uuid
+  ilm_policy?: Name
+  managed_by: ManagedBy
+  prefer_ilm: boolean
 }
 
 export interface IndicesDataStreamLifecycle {
@@ -11989,7 +11999,7 @@ export interface LogstashDeletePipelineRequest extends RequestBase {
 
 export type LogstashDeletePipelineResponse = boolean
 
 export interface LogstashGetPipelineRequest extends RequestBase {
-  id: Ids
+  id?: Ids
 }
 
 export type LogstashGetPipelineResponse = Record<Id, LogstashPipeline>
@@ -14233,6 +14243,7 @@ export interface MlPutTrainedModelRequest extends RequestBase {
     metadata?: any
     model_type?: MlTrainedModelType
     model_size_bytes?: long
+    platform_architecture?: string
     tags?: string[]
   }
 }
@@ -14302,6 +14313,7 @@ export interface MlPutTrainedModelVocabularyRequest extends RequestBase {
   body?: {
     vocabulary: string[]
     merges?: string[]
+    scores?: double[]
   }
 }
@@ -14363,6 +14375,7 @@ export interface MlStartDatafeedResponse {
 
 export interface MlStartTrainedModelDeploymentRequest extends RequestBase {
   model_id: Id
   cache_size?: ByteSize
+  deployment_id?: string
   number_of_allocations?: integer
   priority?: MlTrainingPriority
   queue_capacity?: integer
@@ -17778,6 +17791,7 @@ export interface TransformTimeSync {
 
 export interface TransformDeleteTransformRequest extends RequestBase {
   transform_id: Id
   force?: boolean
+  delete_dest_index?: boolean
   timeout?: Duration
 }
diff --git a/src/client.ts b/src/client.ts
index 09118d58c..50ba4942f 100644
--- a/src/client.ts
+++ b/src/client.ts
@@ -43,6 +43,7 @@ import {
   BearerAuth,
   Context
 } from '@elastic/transport/lib/types'
+import { RedactionOptions } from '@elastic/transport/lib/Transport'
 import BaseConnection, { prepareHeaders } from '@elastic/transport/lib/connection/BaseConnection'
 import SniffingTransport from './sniffingTransport'
 import Helpers from './helpers'
@@ -113,6 +114,7 @@ export interface ClientOptions {
   caFingerprint?: string
   maxResponseSize?: number
   maxCompressedResponseSize?: number
+  redaction?: RedactionOptions
 }
 
 export default class Client extends API {
@@ -186,7 +188,11 @@ export default class Client extends API {
       proxy: null,
       enableMetaHeader: true,
       maxResponseSize: null,
-      maxCompressedResponseSize: null
+      maxCompressedResponseSize: null,
+      redaction: {
+        type: 'replace',
+        additionalKeys: []
+      }
     }, opts)
 
     if (options.caFingerprint != null && isHttpConnection(opts.node ?? opts.nodes)) {
@@ -259,7 +265,8 @@ export default class Client extends API {
           jsonContentType: 'application/vnd.elasticsearch+json; compatible-with=8',
           ndjsonContentType: 'application/vnd.elasticsearch+x-ndjson; compatible-with=8',
           accept: 'application/vnd.elasticsearch+json; compatible-with=8,text/plain'
-        }
+        },
+        redaction: options.redaction
       })
 
     this.helpers = new Helpers({
diff --git a/src/helpers.ts b/src/helpers.ts
index 57804d620..fbf4ff334 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -196,8 +196,11 @@ export default class Helpers {
         await sleep(wait)
       }
       assert(response !== undefined, 'The response is undefined, please file a bug report')
+
+      const { redaction = { type: 'replace' } } = options
+      const errorOptions = { redaction }
       if (response.statusCode === 429) {
-        throw new ResponseError(response)
+        throw new ResponseError(response, errorOptions)
       }
       let scroll_id = response.body._scroll_id
@@ -237,7 +240,7 @@ export default class Helpers {
           await sleep(wait)
         }
         if (response.statusCode === 429) {
-          throw new ResponseError(response)
+          throw new ResponseError(response, errorOptions)
         }
       }
@@ -289,6 +292,9 @@ export default class Helpers {
     } = options
     reqOptions.meta = true
 
+    const { redaction = { type: 'replace' } } = reqOptions
+    const errorOptions = { redaction }
+
     let stopReading = false
     let stopError: Error | null = null
     let timeoutRef = null
@@ -502,7 +508,7 @@ export default class Helpers {
           // @ts-expect-error
           addDocumentsGetter(result)
           if (response.status != null && response.status >= 400) {
-            callbacks[i](new ResponseError(result), result)
+            callbacks[i](new ResponseError(result, errorOptions), result)
           } else {
             callbacks[i](null, result)
           }
@@ -527,6 +533,8 @@ export default class Helpers {
    * @return {object} The possible operations to run with the datasource.
    */
   bulk<TDocument = unknown> (options: BulkHelperOptions<TDocument>, reqOptions: TransportRequestOptions = {}): BulkHelper<TDocument> {
+    assert(!(reqOptions.asStream ?? false), 'bulk helper: the asStream request option is not supported')
+
     const client = this[kClient]
     const { serializer } = client
     if (this[kMetaHeader] !== null) {
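The `src/helpers.ts` changes above thread a `redaction` option from the request options into any `ResponseError` a helper raises, defaulting to `{ type: 'replace' }`. A sketch of a caller taking advantage of that, assuming the option shape shown in `docs/advanced-config.asciidoc`:

[source,js]
----
const scrollSearch = client.helpers.scrollSearch({
  index: 'my-index',
  query: { match_all: {} }
}, {
  // forwarded to the ResponseError thrown on e.g. a 429 response, so the
  // serialized error metadata redacts this custom header as well
  redaction: { type: 'replace', additionalKeys: ['x-my-secret-password'] }
})

for await (const result of scrollSearch) {
  console.log(result.documents)
}
----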
diff --git a/test/integration/index.js b/test/integration/index.js
index defdb400f..b07ddd2d7 100644
--- a/test/integration/index.js
+++ b/test/integration/index.js
@@ -317,7 +317,7 @@ async function start ({ client, isXPack }) {
       if (name === 'setup' || name === 'teardown') continue
       if (options.test && !name.endsWith(options.test)) continue
 
-      const junitTestCase = junitTestSuite.testcase(name, `node_${process.version}/${cleanPath}`)
+      const junitTestCase = junitTestSuite.testcase(name, `node_${process.version}: ${cleanPath}`)
 
       stats.total += 1
       if (shouldSkip(isXPack, file, name)) {
@@ -336,6 +336,7 @@ async function start ({ client, isXPack }) {
         junitTestSuite.end()
         junitTestSuites.end()
         generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
+        err.meta = JSON.stringify(err.meta ?? {}, null, 2)
         console.error(err)
 
         if (options.bail) {
@@ -374,6 +375,7 @@ async function start ({ client, isXPack }) {
     - Total: ${stats.total}
     - Skip: ${stats.skip}
     - Pass: ${stats.pass}
+    - Fail: ${stats.total - (stats.pass + stats.skip)}
     - Assertions: ${stats.assertions}
   `)
 }
diff --git a/test/integration/test-runner.js b/test/integration/test-runner.js
index 64570945a..ce80da43e 100644
--- a/test/integration/test-runner.js
+++ b/test/integration/test-runner.js
@@ -593,13 +593,14 @@ function build (opts = {}) {
       const key = Object.keys(action.match)[0]
       match(
         // in some cases, the yaml refers to the body with an empty string
-        key === '$body' || key === ''
+        key.split('.')[0] === '$body' || key === ''
           ? response
           : delve(response, fillStashedValues(key)),
-        key === '$body'
+        key.split('.')[0] === '$body'
           ? action.match[key]
           : fillStashedValues(action.match)[key],
-        action.match
+        action.match,
+        response
       )
     }
 
@@ -608,7 +609,8 @@ function build (opts = {}) {
       const key = Object.keys(action.lt)[0]
       lt(
         delve(response, fillStashedValues(key)),
-        fillStashedValues(action.lt)[key]
+        fillStashedValues(action.lt)[key],
+        response
       )
     }
 
@@ -617,7 +619,8 @@ function build (opts = {}) {
       const key = Object.keys(action.gt)[0]
       gt(
         delve(response, fillStashedValues(key)),
-        fillStashedValues(action.gt)[key]
+        fillStashedValues(action.gt)[key],
+        response
       )
     }
 
@@ -626,7 +629,8 @@ function build (opts = {}) {
       const key = Object.keys(action.lte)[0]
       lte(
         delve(response, fillStashedValues(key)),
-        fillStashedValues(action.lte)[key]
+        fillStashedValues(action.lte)[key],
+        response
       )
     }
 
@@ -635,7 +639,8 @@ function build (opts = {}) {
       const key = Object.keys(action.gte)[0]
       gte(
         delve(response, fillStashedValues(key)),
-        fillStashedValues(action.gte)[key]
+        fillStashedValues(action.gte)[key],
+        response
       )
     }
 
@@ -648,7 +653,8 @@ function build (opts = {}) {
           : delve(response, fillStashedValues(key)),
         key === '$body'
           ? action.length[key]
-          : fillStashedValues(action.length)[key]
+          : fillStashedValues(action.length)[key],
+        response
       )
     }
 
@@ -657,7 +663,8 @@ function build (opts = {}) {
       const isTrue = fillStashedValues(action.is_true)
      is_true(
         delve(response, isTrue),
-        isTrue
+        isTrue,
+        response
       )
     }
 
@@ -666,7 +673,8 @@ function build (opts = {}) {
       const isFalse = fillStashedValues(action.is_false)
       is_false(
         delve(response, isFalse),
-        isFalse
+        isFalse,
+        response
       )
     }
   }
@@ -679,46 +687,67 @@ function build (opts = {}) {
 /**
  * Asserts that the given value is truthy
  * @param {any} the value to check
  * @param {string} an optional message
+ * @param {any} debugging metadata to attach to any assertion errors
  * @returns {TestRunner}
  */
-function is_true (val, msg) {
-  assert.ok(val, `expect truthy value: ${msg} - value: ${JSON.stringify(val)}`)
+function is_true (val, msg, response) {
+  try {
+    assert.ok((typeof val === 'string' && val.toLowerCase() === 'true') || val, `expect truthy value: ${msg} - value: ${JSON.stringify(val)}`)
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
+  }
 }
 
 /**
  * Asserts that the given value is falsey
  * @param {any} the value to check
  * @param {string} an optional message
+ * @param {any} debugging metadata to attach to any assertion errors
  * @returns {TestRunner}
 */
-function is_false (val, msg) {
-  assert.ok(!val, `expect falsey value: ${msg} - value: ${JSON.stringify(val)}`)
+function is_false (val, msg, response) {
+  try {
+    assert.ok((typeof val === 'string' && val.toLowerCase() === 'false') || !val, `expect falsey value: ${msg} - value: ${JSON.stringify(val)}`)
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
+  }
 }
 
 /**
  * Asserts that two values are the same
  * @param {any} the first value
 * @param {any} the second value
+ * @param {any} debugging metadata to attach to any assertion errors
 * @returns {TestRunner}
 */
-function match (val1, val2, action) {
-  // both values are objects
-  if (typeof val1 === 'object' && typeof val2 === 'object') {
-    assert.deepEqual(val1, val2, typeof action === 'object' ? JSON.stringify(action) : action)
-  // the first value is the body as string and the second a pattern string
-  } else if (
-    typeof val1 === 'string' && typeof val2 === 'string' &&
-    val2.startsWith('/') && (val2.endsWith('/\n') || val2.endsWith('/'))
-  ) {
-    const regStr = val2
-      .replace(/(^|[^\\])#.*/g, '$1')
-      .replace(/(^|[^\\])\s+/g, '$1')
-      .slice(1, -1)
-    // 'm' adds the support for multiline regex
-    assert.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, but got: ${val1}`)
-  // everything else
-  } else {
-    assert.equal(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`)
+function match (val1, val2, action, response) {
+  try {
+    // both values are objects
+    if (typeof val1 === 'object' && typeof val2 === 'object') {
+      assert.deepEqual(val1, val2, typeof action === 'object' ? JSON.stringify(action) : action)
+    // the first value is the body as string and the second a pattern string
+    } else if (
+      typeof val1 === 'string' && typeof val2 === 'string' &&
+      val2.startsWith('/') && (val2.endsWith('/\n') || val2.endsWith('/'))
+    ) {
+      const regStr = val2
+        .replace(/(^|[^\\])#.*/g, '$1')
+        .replace(/(^|[^\\])\s+/g, '$1')
+        .slice(1, -1)
+      // 'm' adds the support for multiline regex
+      assert.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, but got: ${val1}: ${JSON.stringify(action)}`)
+    } else if (typeof val1 === 'string' && typeof val2 === 'string') {
+      // string comparison
+      assert.include(val1, val2, `should include pattern provided: ${val2}, but got: ${val1}: ${JSON.stringify(action)}`)
+    } else {
+      // everything else
+      assert.equal(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`)
+    }
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
   }
 }
 
@@ -727,11 +756,17 @@ function match (val1, val2, action) {
  * It also verifies that the two values are numbers
  * @param {any} the first value
  * @param {any} the second value
+ * @param {any} debugging metadata to attach to any assertion errors
  * @returns {TestRunner}
  */
-function lt (val1, val2) {
-  ;[val1, val2] = getNumbers(val1, val2)
-  assert.ok(val1 < val2)
+function lt (val1, val2, response) {
+  try {
+    ;[val1, val2] = getNumbers(val1, val2)
+    assert.ok(val1 < val2)
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
+  }
 }
 
 /**
@@ -739,11 +774,17 @@ function lt (val1, val2) {
  * It also verifies that the two values are numbers
  * @param {any} the first value
  * @param {any} the second value
+ * @param {any} debugging metadata to attach to any assertion errors
  * @returns {TestRunner}
  */
-function gt (val1, val2) {
-  ;[val1, val2] = getNumbers(val1, val2)
-  assert.ok(val1 > val2)
+function gt (val1, val2, response) {
+  try {
+    ;[val1, val2] = getNumbers(val1, val2)
+    assert.ok(val1 > val2)
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
+  }
 }
 
 /**
@@ -751,11 +792,17 @@ function gt (val1, val2) {
  * It also verifies that the two values are numbers
  * @param {any} the first value
  * @param {any} the second value
+ * @param {any} debugging metadata to attach to any assertion errors
  * @returns {TestRunner}
  */
-function lte (val1, val2) {
-  ;[val1, val2] = getNumbers(val1, val2)
-  assert.ok(val1 <= val2)
+function lte (val1, val2, response) {
+  try {
+    ;[val1, val2] = getNumbers(val1, val2)
+    assert.ok(val1 <= val2)
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
+  }
 }
 
 /**
@@ -763,26 +810,38 @@ function lte (val1, val2) {
 * It also verifies that the two values are numbers
 * @param {any} the first value
 * @param {any} the second value
+ * @param {any} debugging metadata to attach to any assertion errors
 * @returns {TestRunner}
 */
-function gte (val1, val2) {
-  ;[val1, val2] = getNumbers(val1, val2)
-  assert.ok(val1 >= val2)
+function gte (val1, val2, response) {
+  try {
+    ;[val1, val2] = getNumbers(val1, val2)
+    assert.ok(val1 >= val2)
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
+  }
 }
 
 /**
 * Asserts that the given value has the specified length
 * @param {string|object|array} the object to check
 * @param {number} the expected length
+ * @param {any} debugging metadata to attach to any assertion errors
 * @returns {TestRunner}
 */
-function length (val, len) {
-  if (typeof val === 'string' || Array.isArray(val)) {
-    assert.equal(val.length, len)
-  } else if (typeof val === 'object' && val !== null) {
-    assert.equal(Object.keys(val).length, len)
-  } else {
-    assert.fail(`length: the given value is invalid: ${val}`)
+function length (val, len, response) {
+  try {
+    if (typeof val === 'string' || Array.isArray(val)) {
+      assert.equal(val.length, len)
+    } else if (typeof val === 'object' && val !== null) {
+      assert.equal(Object.keys(val).length, len)
+    } else {
+      assert.fail(`length: the given value is invalid: ${val}`)
+    }
+  } catch (err) {
+    err.response = JSON.stringify(response)
+    throw err
   }
 }
 
@@ -813,6 +872,10 @@
  */
 function parseDo (action) {
   action = JSON.parse(JSON.stringify(action))
+
+  if (typeof action === 'string') action = {[action]: {}}
+  if (Array.isArray(action)) action = action[0]
+
   return Object.keys(action).reduce((acc, val) => {
     switch (val) {
       case 'catch':
diff --git a/test/unit/helpers/bulk.test.ts b/test/unit/helpers/bulk.test.ts
index ae217ec3e..62c297ebf 100644
--- a/test/unit/helpers/bulk.test.ts
+++ b/test/unit/helpers/bulk.test.ts
@@ -18,6 +18,7 @@
 */
 
 import FakeTimers from '@sinonjs/fake-timers'
+import { AssertionError } from 'assert'
 import { createReadStream } from 'fs'
 import * as http from 'http'
 import { join } from 'path'
@@ -1338,6 +1339,37 @@ test('transport options', t => {
     })
   })
 
+  t.test('Should not allow asStream request option', async t => {
+    t.plan(2)
+
+    const client = new Client({
+      node: 'http://localhost:9200',
+    })
+
+    try {
+      await client.helpers.bulk({
+        datasource: dataset.slice(),
+        flushBytes: 1,
+        concurrency: 1,
+        onDocument (doc) {
+          return { index: { _index: 'test' } }
+        },
+        onDrop (doc) {
+          t.fail('This should never be called')
+        },
+        refreshOnCompletion: true
+      }, {
+        headers: {
+          foo: 'bar'
+        },
+        asStream: true,
+      })
+    } catch (err: any) {
+      t.ok(err instanceof AssertionError)
+      t.equal(err.message, 'bulk helper: the asStream request option is not supported')
+    }
+  })
+
   t.end()
 })
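For reference, a sketch of how the new bulk-helper guard exercised by the test above surfaces to callers; the datasource contents are hypothetical:

[source,js]
----
try {
  await client.helpers.bulk({
    datasource: [{ name: 'Snow Crash' }],
    onDocument () {
      return { index: { _index: 'books' } }
    }
  }, { asStream: true }) // rejected before any request is sent
} catch (err) {
  console.log(err.message) // 'bulk helper: the asStream request option is not supported'
}
----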