diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc index bf11f87b96ce9..67b7aa8e6a011 100644 --- a/docs/developer/plugin-list.asciidoc +++ b/docs/developer/plugin-list.asciidoc @@ -16,7 +16,7 @@ NOTE: [discrete] === src/plugins -[%header,cols=2*] +[%header,cols=2*] |=== |Name |Description @@ -259,7 +259,7 @@ which will load the visualization's editor. [discrete] === x-pack/plugins -[%header,cols=2*] +[%header,cols=2*] |=== |Name |Description @@ -515,6 +515,10 @@ As a developer you can reuse and extend built-in alerts and actions UI functiona in their infrastructure. +|{kib-repo}blob/{branch}/x-pack/plugins/drilldowns/url_drilldown/README.md[urlDrilldown] +|NOTE: This plugin contains implementation of URL drilldown. For drilldowns infrastructure code refer to ui_actions_enhanced plugin. + + |{kib-repo}blob/{branch}/x-pack/plugins/watcher/README.md[watcher] |This plugins adopts some conventions in addition to or in place of conventions in Kibana (at the time of the plugin's creation): @@ -523,10 +527,6 @@ in their infrastructure. |Contains HTTP endpoints and UiSettings that are slated for removal. -|{kib-repo}blob/{branch}/x-pack/plugins/drilldowns/url_drilldown/README.md[urlDrilldown] -|NOTE: This plugin contains implementation of URL drilldown. For drilldowns infrastructure code refer to ui_actions_enhanced plugin. - - |=== include::{kibana-root}/src/plugins/dashboard/README.asciidoc[leveloffset=+1] diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md index f7b55b0650d8b..3afd5eaa6f1f7 100644 --- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md +++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md @@ -91,6 +91,7 @@ readonly links: { readonly gettingStarted: string; }; readonly query: { + readonly eql: string; readonly luceneQuerySyntax: string; readonly queryDsl: string; readonly kueryQuerySyntax: string; diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md index 3f58cf08ee6b6..5249381969b98 100644 --- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md +++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md @@ -17,5 +17,5 @@ export interface DocLinksStart | --- | --- | --- | | [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string | | | [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string | | -| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly dashboard: {
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly startup: string;
readonly exportedFields: string;
};
readonly auditbeat: {
readonly base: string;
};
readonly metricbeat: {
readonly base: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly date_histogram: string;
readonly date_range: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessSyntax: string;
readonly luceneExpressions: string;
};
readonly indexPatterns: {
readonly loadingData: string;
readonly introduction: string;
};
readonly addData: string;
readonly kibana: string;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
};
readonly query: {
readonly luceneQuerySyntax: string;
readonly queryDsl: string;
readonly kueryQuerySyntax: string;
};
readonly date: {
readonly dateMath: string;
};
readonly management: Record<string, string>;
readonly visualize: Record<string, string>;
} | | +| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly dashboard: {
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly startup: string;
readonly exportedFields: string;
};
readonly auditbeat: {
readonly base: string;
};
readonly metricbeat: {
readonly base: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly date_histogram: string;
readonly date_range: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessSyntax: string;
readonly luceneExpressions: string;
};
readonly indexPatterns: {
readonly loadingData: string;
readonly introduction: string;
};
readonly addData: string;
readonly kibana: string;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
};
readonly query: {
readonly eql: string;
readonly luceneQuerySyntax: string;
readonly queryDsl: string;
readonly kueryQuerySyntax: string;
};
readonly date: {
readonly dateMath: string;
};
readonly management: Record<string, string>;
readonly visualize: Record<string, string>;
} | | diff --git a/docs/setup/production.asciidoc b/docs/setup/production.asciidoc index 3075220e3a47c..e097704e05d40 100644 --- a/docs/setup/production.asciidoc +++ b/docs/setup/production.asciidoc @@ -135,6 +135,7 @@ Settings that must be the same: xpack.security.encryptionKey //decrypting session information xpack.reporting.encryptionKey //decrypting reports xpack.encryptedSavedObjects.encryptionKey // decrypting saved objects +xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys // saved objects encryption key rotation, if any -------- Separate configuration files can be used from the command line by using the `-c` flag: diff --git a/package.json b/package.json index ff98d7f85dcef..cebfddbe34e94 100644 --- a/package.json +++ b/package.json @@ -115,8 +115,6 @@ ] }, "dependencies": { - "@babel/core": "^7.11.1", - "@babel/register": "^7.10.5", "@elastic/datemath": "5.0.3", "@elastic/elasticsearch": "7.9.1", "@elastic/eui": "29.0.0", @@ -128,7 +126,6 @@ "@hapi/wreck": "^15.0.2", "@kbn/analytics": "1.0.0", "@kbn/apm-config-loader": "1.0.0", - "@kbn/babel-preset": "1.0.0", "@kbn/config": "1.0.0", "@kbn/config-schema": "1.0.0", "@kbn/i18n": "1.0.0", @@ -214,6 +211,7 @@ "rxjs": "^6.5.5", "seedrandom": "^3.0.5", "semver": "^5.7.0", + "source-map-support": "^0.5.19", "style-it": "^2.1.3", "symbol-observable": "^1.2.0", "tar": "4.4.13", @@ -227,10 +225,12 @@ "yauzl": "^2.10.0" }, "devDependencies": { + "@babel/core": "^7.11.1", "@babel/parser": "^7.11.2", + "@babel/register": "^7.10.5", "@babel/types": "^7.11.0", "@elastic/apm-rum": "^5.6.1", - "@elastic/charts": "21.1.2", + "@elastic/charts": "23.0.0", "@elastic/ems-client": "7.10.0", "@elastic/eslint-config-kibana": "0.15.0", "@elastic/eslint-plugin-eui": "0.0.2", @@ -238,6 +238,7 @@ "@elastic/github-checks-reporter": "0.0.20b3", "@elastic/makelogs": "^6.0.0", "@elastic/ui-ace": "0.2.3", + "@kbn/babel-preset": "1.0.0", "@kbn/dev-utils": "1.0.0", "@kbn/es": "1.0.0", "@kbn/es-archiver": "1.0.0", diff --git a/packages/kbn-babel-preset/node_preset.js b/packages/kbn-babel-preset/node_preset.js index 45afe5d5ebc32..86817ed253e7c 100644 --- a/packages/kbn-babel-preset/node_preset.js +++ b/packages/kbn-babel-preset/node_preset.js @@ -49,13 +49,5 @@ module.exports = (_, options = {}) => { ], require('./common_preset'), ], - plugins: [ - [ - require.resolve('babel-plugin-transform-define'), - { - 'global.__BUILT_WITH_BABEL__': 'true', - }, - ], - ], }; }; diff --git a/packages/kbn-babel-preset/package.json b/packages/kbn-babel-preset/package.json index bc4e0ec338f94..79d2fd8687dae 100644 --- a/packages/kbn-babel-preset/package.json +++ b/packages/kbn-babel-preset/package.json @@ -14,7 +14,6 @@ "@babel/preset-typescript": "^7.10.4", "babel-plugin-add-module-exports": "^1.0.2", "babel-plugin-styled-components": "^1.10.7", - "babel-plugin-transform-define": "^1.3.1", "babel-plugin-transform-react-remove-prop-types": "^0.4.24", "react-is": "^16.8.0", "styled-components": "^5.1.0" diff --git a/packages/kbn-dev-utils/src/plugin_list/discover_plugins.ts b/packages/kbn-dev-utils/src/plugin_list/discover_plugins.ts index 5d92ddb600aa9..e8f6735205b19 100644 --- a/packages/kbn-dev-utils/src/plugin_list/discover_plugins.ts +++ b/packages/kbn-dev-utils/src/plugin_list/discover_plugins.ts @@ -29,7 +29,7 @@ import { extractAsciidocInfo } from './extract_asciidoc_info'; export interface Plugin { id: string; - relativeDir?: string; + relativeDir: string; relativeReadmePath?: string; readmeSnippet?: string; readmeAsciidocAnchor?: string; diff --git 
a/packages/kbn-dev-utils/src/plugin_list/generate_plugin_list.ts b/packages/kbn-dev-utils/src/plugin_list/generate_plugin_list.ts index e1a1323553113..680c220adb18c 100644 --- a/packages/kbn-dev-utils/src/plugin_list/generate_plugin_list.ts +++ b/packages/kbn-dev-utils/src/plugin_list/generate_plugin_list.ts @@ -24,9 +24,11 @@ import { REPO_ROOT } from '@kbn/utils'; import { Plugins } from './discover_plugins'; +const sortPlugins = (plugins: Plugins) => plugins.sort((a, b) => a.id.localeCompare(b.id)); + function* printPlugins(plugins: Plugins, includes: string[]) { - for (const plugin of plugins) { - const path = plugin.relativeReadmePath || plugin.relativeDir; + for (const plugin of sortPlugins(plugins)) { + const path = normalizePath(plugin.relativeReadmePath || plugin.relativeDir); yield ''; if (plugin.readmeAsciidocAnchor) { @@ -67,7 +69,7 @@ NOTE: [discrete] === src/plugins -[%header,cols=2*] +[%header,cols=2*] |=== |Name |Description @@ -79,7 +81,7 @@ ${Array.from(printPlugins(ossPlugins, includes)).join('\n')} [discrete] === x-pack/plugins -[%header,cols=2*] +[%header,cols=2*] |=== |Name |Description diff --git a/packages/kbn-telemetry-tools/src/tools/serializer.test.ts b/packages/kbn-telemetry-tools/src/tools/serializer.test.ts index 652b26c8edb23..85fb84c714e20 100644 --- a/packages/kbn-telemetry-tools/src/tools/serializer.test.ts +++ b/packages/kbn-telemetry-tools/src/tools/serializer.test.ts @@ -138,4 +138,22 @@ describe('getDescriptor', () => { }, }); }); + + it('serializes RecordWithKnownProps', () => { + const usageInterface = usageInterfaces.get('RecordWithKnownProps')!; + const descriptor = getDescriptor(usageInterface, tsProgram); + expect(descriptor).toEqual({ + prop1: { kind: ts.SyntaxKind.NumberKeyword, type: 'NumberKeyword' }, + prop2: { kind: ts.SyntaxKind.NumberKeyword, type: 'NumberKeyword' }, + }); + }); + + it('serializes IndexedAccessType', () => { + const usageInterface = usageInterfaces.get('IndexedAccessType')!; + const descriptor = getDescriptor(usageInterface, tsProgram); + expect(descriptor).toEqual({ + prop1: { kind: ts.SyntaxKind.StringKeyword, type: 'StringKeyword' }, + prop2: { kind: ts.SyntaxKind.StringKeyword, type: 'StringKeyword' }, + }); + }); }); diff --git a/packages/kbn-telemetry-tools/src/tools/serializer.ts b/packages/kbn-telemetry-tools/src/tools/serializer.ts index cd845a680ad06..ea5f184008026 100644 --- a/packages/kbn-telemetry-tools/src/tools/serializer.ts +++ b/packages/kbn-telemetry-tools/src/tools/serializer.ts @@ -18,7 +18,7 @@ */ import * as ts from 'typescript'; -import { uniqBy } from 'lodash'; +import { uniqBy, pick } from 'lodash'; import { getResolvedModuleSourceFile, getIdentifierDeclarationFromSource, @@ -95,7 +95,16 @@ export function getConstraints(node: ts.Node, program: ts.Program): any { return node.literal.text; } - throw Error(`Unsupported constraint`); + if (ts.isImportSpecifier(node)) { + const source = node.getSourceFile(); + const importedModuleName = getModuleSpecifier(node); + + const declarationSource = getResolvedModuleSourceFile(source, program, importedModuleName); + const declarationNode = getIdentifierDeclarationFromSource(node.name, declarationSource); + return getConstraints(declarationNode, program); + } + + throw Error(`Unsupported constraint of kind ${node.kind} [${ts.SyntaxKind[node.kind]}]`); } export function getDescriptor(node: ts.Node, program: ts.Program): Descriptor | DescriptorValue { @@ -157,9 +166,25 @@ export function getDescriptor(node: ts.Node, program: ts.Program): Descriptor | return { 
kind: TelemetryKinds.Date, type: 'Date' }; } // Support `Record` - if (symbolName === 'Record' && node.typeArguments![0].kind === ts.SyntaxKind.StringKeyword) { - return { '@@INDEX@@': getDescriptor(node.typeArguments![1], program) }; + if (symbolName === 'Record') { + const descriptor = getDescriptor(node.typeArguments![1], program); + if (node.typeArguments![0].kind === ts.SyntaxKind.StringKeyword) { + return { '@@INDEX@@': descriptor }; + } + const constraints = getConstraints(node.typeArguments![0], program); + const constraintsArray = Array.isArray(constraints) ? constraints : [constraints]; + if (typeof constraintsArray[0] === 'string') { + return constraintsArray.reduce((acc, c) => ({ ...acc, [c]: descriptor }), {}); + } + } + + // Support `Pick` + if (symbolName === 'Pick') { + const parentDescriptor = getDescriptor(node.typeArguments![0], program); + const pickPropNames = getConstraints(node.typeArguments![1], program); + return pick(parentDescriptor, pickPropNames); } + const declaration = (symbol?.getDeclarations() || [])[0]; if (declaration) { return getDescriptor(declaration, program); diff --git a/packages/kbn-telemetry-tools/src/tools/utils.ts b/packages/kbn-telemetry-tools/src/tools/utils.ts index 947a4f66908f6..90ba7f4d9168f 100644 --- a/packages/kbn-telemetry-tools/src/tools/utils.ts +++ b/packages/kbn-telemetry-tools/src/tools/utils.ts @@ -78,14 +78,14 @@ export function getIdentifierDeclarationFromSource(node: ts.Node, source: ts.Sou const identifierName = node.getText(); const identifierDefinition: ts.Node = (source as any).locals.get(identifierName); if (!identifierDefinition) { - throw new Error(`Unable to fine identifier in source ${identifierName}`); + throw new Error(`Unable to find identifier in source ${identifierName}`); } const declarations = (identifierDefinition as any).declarations as ts.Node[]; const latestDeclaration: ts.Node | false | undefined = Array.isArray(declarations) && declarations[declarations.length - 1]; if (!latestDeclaration) { - throw new Error(`Unable to fine declaration for identifier ${identifierName}`); + throw new Error(`Unable to find declaration for identifier ${identifierName}`); } return latestDeclaration; diff --git a/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js b/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js new file mode 100644 index 0000000000000..44ff579411bd9 --- /dev/null +++ b/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +const Path = require('path'); + +const { REPO_ROOT } = require('@kbn/dev-utils'); + +// modifies all future calls to require() to automatically +// compile the required source with babel +require('@babel/register')({ + ignore: [/[\/\\](node_modules|target|dist)[\/\\]/], + only: [ + Path.resolve(REPO_ROOT, 'test'), + Path.resolve(REPO_ROOT, 'x-pack/test'), + Path.resolve(REPO_ROOT, 'examples'), + Path.resolve(REPO_ROOT, 'x-pack/examples'), + // TODO: we should probably remove this link back to the source + Path.resolve(REPO_ROOT, 'x-pack/plugins/task_manager/server/config.ts'), + ], + babelrc: false, + presets: [require.resolve('@kbn/babel-preset/node_preset')], + extensions: ['.js', '.ts', '.tsx'], +}); diff --git a/packages/kbn-test/src/functional_tests/lib/run_kibana_server.js b/packages/kbn-test/src/functional_tests/lib/run_kibana_server.js index fb9f8f7a52408..e7ec99467ecfd 100644 --- a/packages/kbn-test/src/functional_tests/lib/run_kibana_server.js +++ b/packages/kbn-test/src/functional_tests/lib/run_kibana_server.js @@ -17,9 +17,26 @@ * under the License. */ -import { resolve } from 'path'; +import { resolve, relative } from 'path'; import { KIBANA_ROOT, KIBANA_EXEC, KIBANA_EXEC_PATH } from './paths'; +function extendNodeOptions(installDir) { + if (!installDir) { + return {}; + } + + const testOnlyRegisterPath = relative( + installDir, + require.resolve('./babel_register_for_test_plugins') + ); + + return { + NODE_OPTIONS: `--require=${testOnlyRegisterPath}${ + process.env.NODE_OPTIONS ? ` ${process.env.NODE_OPTIONS}` : '' + }`, + }; +} + export async function runKibanaServer({ procs, config, options }) { const { installDir } = options; @@ -29,6 +46,7 @@ export async function runKibanaServer({ procs, config, options }) { env: { FORCE_COLOR: 1, ...process.env, + ...extendNodeOptions(installDir), }, cwd: installDir || KIBANA_ROOT, wait: /http server running/, diff --git a/packages/kbn-ui-shared-deps/package.json b/packages/kbn-ui-shared-deps/package.json index 278e8efd2d29e..e5f1a06e5bffa 100644 --- a/packages/kbn-ui-shared-deps/package.json +++ b/packages/kbn-ui-shared-deps/package.json @@ -9,7 +9,7 @@ "kbn:watch": "node scripts/build --dev --watch" }, "dependencies": { - "@elastic/charts": "21.1.2", + "@elastic/charts": "23.0.0", "@elastic/eui": "29.0.0", "@elastic/numeral": "^2.5.0", "@kbn/i18n": "1.0.0", diff --git a/scripts/build_plugin_list_docs.js b/scripts/build_plugin_list_docs.js index 54821a1b10ee8..6f184ca7b14c6 100644 --- a/scripts/build_plugin_list_docs.js +++ b/scripts/build_plugin_list_docs.js @@ -17,5 +17,5 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/dev-utils').runPluginListCli(); diff --git a/scripts/es.js b/scripts/es.js index 2d56496f2fdd2..53b01d8cb4414 100644 --- a/scripts/es.js +++ b/scripts/es.js @@ -17,7 +17,7 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); var resolve = require('path').resolve; var pkg = require('../package.json'); diff --git a/scripts/generate_plugin.js b/scripts/generate_plugin.js index f695eabb30f21..af3d31048ecfc 100644 --- a/scripts/generate_plugin.js +++ b/scripts/generate_plugin.js @@ -17,5 +17,5 @@ * under the License.
*/ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/plugin-generator').runCli(); diff --git a/scripts/plugin_helpers.js b/scripts/plugin_helpers.js index a07ba7a9185f8..f28bf8fcfff90 100644 --- a/scripts/plugin_helpers.js +++ b/scripts/plugin_helpers.js @@ -17,5 +17,5 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/plugin-helpers').runCli(); diff --git a/scripts/register_git_hook.js b/scripts/register_git_hook.js index af3f54619bcec..50dfeaf46109f 100644 --- a/scripts/register_git_hook.js +++ b/scripts/register_git_hook.js @@ -17,5 +17,5 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/dev-utils/target/precommit_hook/cli'); diff --git a/scripts/release_notes.js b/scripts/release_notes.js index f46ee5823d70d..ee9275194ae94 100644 --- a/scripts/release_notes.js +++ b/scripts/release_notes.js @@ -17,5 +17,5 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/release-notes').runReleaseNotesCli(); diff --git a/scripts/telemetry_check.js b/scripts/telemetry_check.js index 06b3ed46bdba6..22a22b401cb15 100644 --- a/scripts/telemetry_check.js +++ b/scripts/telemetry_check.js @@ -17,5 +17,5 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/telemetry-tools').runTelemetryCheck(); diff --git a/scripts/telemetry_extract.js b/scripts/telemetry_extract.js index 051bee26537b9..e2fbb64c26719 100644 --- a/scripts/telemetry_extract.js +++ b/scripts/telemetry_extract.js @@ -17,5 +17,5 @@ * under the License. */ -require('../src/setup_node_env/prebuilt_dev_only_entry'); +require('../src/setup_node_env/no_transpilation'); require('@kbn/telemetry-tools').runTelemetryExtract(); diff --git a/src/cli/index.js b/src/cli/index.js index 45f88eaf82a5b..e5480d2137624 100644 --- a/src/cli/index.js +++ b/src/cli/index.js @@ -18,5 +18,6 @@ */ require('../apm')(); -require('../setup_node_env'); +require('../setup_node_env/no_transpilation'); +require('../setup_node_env/babel_register/polyfill'); require('./cli'); diff --git a/src/cli/serve/integration_tests/invalid_config.test.ts b/src/cli/serve/integration_tests/invalid_config.test.ts index fd6fa1bf192fc..a72142faa22fe 100644 --- a/src/cli/serve/integration_tests/invalid_config.test.ts +++ b/src/cli/serve/integration_tests/invalid_config.test.ts @@ -18,10 +18,10 @@ */ import { spawnSync } from 'child_process'; -import { resolve } from 'path'; -const ROOT_DIR = resolve(__dirname, '../../../../'); -const INVALID_CONFIG_PATH = resolve(__dirname, '__fixtures__/invalid_config.yml'); +import { REPO_ROOT } from '@kbn/dev-utils'; + +const INVALID_CONFIG_PATH = require.resolve('./__fixtures__/invalid_config.yml'); interface LogEntry { message: string; @@ -35,11 +35,11 @@ describe('cli invalid config support', function () { function () { // Unused keys only throw once LegacyService starts, so disable migrations so that Core // will finish the start lifecycle without a running Elasticsearch instance. 
- const { error, status, stdout } = spawnSync( + const { error, status, stdout, stderr } = spawnSync( process.execPath, - ['src/cli', '--config', INVALID_CONFIG_PATH, '--migrations.skip=true'], + ['scripts/kibana', '--config', INVALID_CONFIG_PATH, '--migrations.skip=true'], { - cwd: ROOT_DIR, + cwd: REPO_ROOT, } ); @@ -57,13 +57,21 @@ describe('cli invalid config support', function () { })); expect(error).toBe(undefined); - expect(status).toBe(64); + + if (!fatalLogLine) { + throw new Error( + `cli did not log the expected fatal error message:\n\nstdout: \n${stdout}\n\nstderr:\n${stderr}` + ); + } + expect(fatalLogLine.message).toContain( 'Error: Unknown configuration key(s): "unknown.key", "other.unknown.key", "other.third", "some.flat.key", ' + '"some.array". Check for spelling errors and ensure that expected plugins are installed.' ); expect(fatalLogLine.tags).toEqual(['fatal', 'root']); expect(fatalLogLine.type).toEqual('log'); + + expect(status).toBe(64); }, 20 * 1000 ); diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts index 47f58a3a9fcbf..629bf97c24887 100644 --- a/src/core/public/doc_links/doc_links_service.ts +++ b/src/core/public/doc_links/doc_links_service.ts @@ -119,6 +119,7 @@ export class DocLinksService { gettingStarted: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/index.html`, }, query: { + eql: `${ELASTICSEARCH_DOCS}eql.html`, luceneQuerySyntax: `${ELASTICSEARCH_DOCS}query-dsl-query-string-query.html#query-string-syntax`, queryDsl: `${ELASTICSEARCH_DOCS}query-dsl.html`, kueryQuerySyntax: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/kuery-query.html`, @@ -227,6 +228,7 @@ export interface DocLinksStart { readonly gettingStarted: string; }; readonly query: { + readonly eql: string; readonly luceneQuerySyntax: string; readonly queryDsl: string; readonly kueryQuerySyntax: string; diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 5970c9a8571c4..08491dc76cd27 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -539,6 +539,7 @@ export interface DocLinksStart { readonly gettingStarted: string; }; readonly query: { + readonly eql: string; readonly luceneQuerySyntax: string; readonly queryDsl: string; readonly kueryQuerySyntax: string; diff --git a/src/dev/build/tasks/copy_source_task.ts b/src/dev/build/tasks/copy_source_task.ts index 948e2357effb0..78e1395586a17 100644 --- a/src/dev/build/tasks/copy_source_task.ts +++ b/src/dev/build/tasks/copy_source_task.ts @@ -37,6 +37,8 @@ export const CopySource: Task = { '!src/cli/repl/**', '!src/functional_test_runner/**', '!src/dev/**', + '!src/setup_node_env/babel_register/index.js', + '!src/setup_node_env/babel_register/register.js', '!**/public/**', 'typings/**', 'config/kibana.yml', diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/bin/kibana-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/bin/kibana-docker index 2770f288b6af8..959e1f8dc3e72 100755 --- a/src/dev/build/tasks/os_packages/docker_generator/resources/bin/kibana-docker +++ b/src/dev/build/tasks/os_packages/docker_generator/resources/bin/kibana-docker @@ -159,6 +159,7 @@ kibana_vars=( xpack.code.security.gitHostWhitelist xpack.code.security.gitProtocolWhitelist xpack.encryptedSavedObjects.encryptionKey + xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys xpack.graph.enabled xpack.graph.canEditDrillDownUrls xpack.graph.savePolicy diff --git 
a/src/fixtures/telemetry_collectors/constants.ts b/src/fixtures/telemetry_collectors/constants.ts index 60df05bac2aeb..8896c294676c4 100644 --- a/src/fixtures/telemetry_collectors/constants.ts +++ b/src/fixtures/telemetry_collectors/constants.ts @@ -66,3 +66,7 @@ export interface MappedTypes { [key in 'prop3']: number; }; } + +export type RecordWithKnownProps = Record; + +export type IndexedAccessType = Pick; diff --git a/src/plugins/dashboard/public/application/top_nav/get_top_nav_config.ts b/src/plugins/dashboard/public/application/top_nav/get_top_nav_config.ts index dbdadeb4e4e7c..77c4a2235d471 100644 --- a/src/plugins/dashboard/public/application/top_nav/get_top_nav_config.ts +++ b/src/plugins/dashboard/public/application/top_nav/get_top_nav_config.ts @@ -48,12 +48,12 @@ export function getTopNavConfig( ]; case ViewMode.EDIT: return [ - getCreateNewConfig(actions[TopNavIds.VISUALIZE]), - getSaveConfig(actions[TopNavIds.SAVE]), - getViewConfig(actions[TopNavIds.EXIT_EDIT_MODE]), - getAddConfig(actions[TopNavIds.ADD_EXISTING]), getOptionsConfig(actions[TopNavIds.OPTIONS]), getShareConfig(actions[TopNavIds.SHARE]), + getAddConfig(actions[TopNavIds.ADD_EXISTING]), + getViewConfig(actions[TopNavIds.EXIT_EDIT_MODE]), + getSaveConfig(actions[TopNavIds.SAVE]), + getCreateNewConfig(actions[TopNavIds.VISUALIZE]), ]; default: return []; @@ -79,7 +79,9 @@ function getFullScreenConfig(action: NavAction) { */ function getEditConfig(action: NavAction) { return { + emphasize: true, id: 'edit', + iconType: 'pencil', label: i18n.translate('dashboard.topNave.editButtonAriaLabel', { defaultMessage: 'edit', }), @@ -168,7 +170,7 @@ function getAddConfig(action: NavAction) { function getCreateNewConfig(action: NavAction) { return { emphasize: true, - iconType: 'plusInCircle', + iconType: 'plusInCircleFilled', id: 'addNew', label: i18n.translate('dashboard.topNave.addNewButtonAriaLabel', { defaultMessage: 'Create new', diff --git a/src/plugins/embeddable/public/lib/panel/_embeddable_panel.scss b/src/plugins/embeddable/public/lib/panel/_embeddable_panel.scss index 36a7fee14cce1..cdc0f9f0e0451 100644 --- a/src/plugins/embeddable/public/lib/panel/_embeddable_panel.scss +++ b/src/plugins/embeddable/public/lib/panel/_embeddable_panel.scss @@ -54,9 +54,14 @@ .embPanel__titleInner { overflow: hidden; display: flex; + align-items: center; padding-right: $euiSizeS; } + .embPanel__titleTooltipAnchor { + max-width: 100%; + } + .embPanel__titleText { @include euiTextTruncate; } diff --git a/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx b/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx index c538b98949a43..ea6a6a78c2b67 100644 --- a/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx +++ b/src/plugins/embeddable/public/lib/panel/panel_header/panel_header.tsx @@ -99,16 +99,6 @@ function renderNotifications( }); } -function renderTooltip(description: string) { - return ( - description !== '' && ( - - - - ) - ); -} - type EmbeddableWithDescription = IEmbeddable & { getDescription: () => string }; function getViewDescription(embeddable: IEmbeddable | EmbeddableWithDescription) { @@ -134,9 +124,10 @@ export function PanelHeader({ embeddable, headerId, }: PanelHeaderProps) { - const viewDescription = getViewDescription(embeddable); - const showTitle = !hidePanelTitle && (!isViewMode || title || viewDescription !== ''); - const showPanelBar = !isViewMode || badges.length > 0 || notifications.length > 0 || showTitle; + const description = 
getViewDescription(embeddable); + const showTitle = !hidePanelTitle && (!isViewMode || title); + const showPanelBar = + !isViewMode || badges.length > 0 || notifications.length > 0 || showTitle || description; const classes = classNames('embPanel__header', { // eslint-disable-next-line @typescript-eslint/naming-convention 'embPanel__header--floater': !showPanelBar, @@ -174,26 +165,36 @@ export function PanelHeader({ ); } + const renderTitle = () => { + const titleComponent = showTitle ? ( + + {title || placeholderTitle} + + ) : undefined; + return description ? ( + + + {titleComponent} + + + ) : ( + titleComponent + ); + }; + return (

- {showTitle ? ( - - - {getAriaLabel()} - {renderTooltip(viewDescription)} - - ) : ( - {getAriaLabel()} - )} + {getAriaLabel()} + {renderTitle()} {renderBadges(badges, embeddable)}

{renderNotifications(notifications, embeddable)} diff --git a/src/plugins/home/public/application/components/app_navigation_handler.ts b/src/plugins/home/public/application/components/app_navigation_handler.ts index 91407ffcaf226..b6230bc9f1e38 100644 --- a/src/plugins/home/public/application/components/app_navigation_handler.ts +++ b/src/plugins/home/public/application/components/app_navigation_handler.ts @@ -24,6 +24,7 @@ export const createAppNavigationHandler = (targetUrl: string) => (event: MouseEv if (event.altKey || event.metaKey || event.ctrlKey) { return; } + const { application, addBasePath } = getServices(); event.preventDefault(); - getServices().application.navigateToUrl(targetUrl); + application.navigateToUrl(addBasePath(targetUrl)); }; diff --git a/src/plugins/navigation/public/top_nav_menu/__snapshots__/top_nav_menu_item.test.tsx.snap b/src/plugins/navigation/public/top_nav_menu/__snapshots__/top_nav_menu_item.test.tsx.snap index 570699aa0c0e2..155377e5ea335 100644 --- a/src/plugins/navigation/public/top_nav_menu/__snapshots__/top_nav_menu_item.test.tsx.snap +++ b/src/plugins/navigation/public/top_nav_menu/__snapshots__/top_nav_menu_item.test.tsx.snap @@ -2,7 +2,6 @@ exports[`TopNavMenu Should render emphasized item which should be clickable 1`] = ` * > * { // TEMP fix to adjust spacing between EuiHeaderList__list items margin: 0 $euiSizeXS; diff --git a/src/plugins/navigation/public/top_nav_menu/top_nav_menu_item.tsx b/src/plugins/navigation/public/top_nav_menu/top_nav_menu_item.tsx index 96a205b737273..e503ebb839f48 100644 --- a/src/plugins/navigation/public/top_nav_menu/top_nav_menu_item.tsx +++ b/src/plugins/navigation/public/top_nav_menu/top_nav_menu_item.tsx @@ -48,7 +48,7 @@ export function TopNavMenuItem(props: TopNavMenuData) { }; const btn = props.emphasize ? ( - + {upperFirst(props.label || props.id!)} ) : ( diff --git a/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx b/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx index 12720f3f22e7c..cb68a647cb81d 100644 --- a/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx +++ b/src/plugins/visualize/public/application/utils/get_top_nav_config.tsx @@ -175,54 +175,61 @@ export const getTopNavConfig = ( }; const topNavMenu: TopNavMenuData[] = [ - ...(originatingApp && ((savedVis && savedVis.id) || embeddableId) - ? 
[ - { - id: 'saveAndReturn', - label: i18n.translate('visualize.topNavMenu.saveAndReturnVisualizationButtonLabel', { - defaultMessage: 'Save and return', - }), - emphasize: true, - iconType: 'check', - description: i18n.translate( - 'visualize.topNavMenu.saveAndReturnVisualizationButtonAriaLabel', - { - defaultMessage: 'Finish editing visualization and return to the last app', - } - ), - testId: 'visualizesaveAndReturnButton', - disableButton: hasUnappliedChanges, - tooltip() { - if (hasUnappliedChanges) { - return i18n.translate( - 'visualize.topNavMenu.saveAndReturnVisualizationDisabledButtonTooltip', - { - defaultMessage: 'Apply or Discard your changes before finishing', - } - ); - } - }, - run: async () => { - const saveOptions = { - confirmOverwrite: false, - returnToOrigin: true, - }; - if ( - originatingApp === 'dashboards' && - dashboard.dashboardFeatureFlagConfig.allowByValueEmbeddables && - !savedVis - ) { - return createVisReference(); - } - return doSave(saveOptions); + { + id: 'inspector', + label: i18n.translate('visualize.topNavMenu.openInspectorButtonLabel', { + defaultMessage: 'inspect', + }), + description: i18n.translate('visualize.topNavMenu.openInspectorButtonAriaLabel', { + defaultMessage: 'Open Inspector for visualization', + }), + testId: 'openInspectorButton', + disableButton() { + return !embeddableHandler.hasInspector || !embeddableHandler.hasInspector(); + }, + run: openInspector, + tooltip() { + if (!embeddableHandler.hasInspector || !embeddableHandler.hasInspector()) { + return i18n.translate('visualize.topNavMenu.openInspectorDisabledButtonTooltip', { + defaultMessage: `This visualization doesn't support any inspectors.`, + }); + } + }, + }, + { + id: 'share', + label: i18n.translate('visualize.topNavMenu.shareVisualizationButtonLabel', { + defaultMessage: 'share', + }), + description: i18n.translate('visualize.topNavMenu.shareVisualizationButtonAriaLabel', { + defaultMessage: 'Share Visualization', + }), + testId: 'shareTopNavButton', + run: (anchorElement) => { + if (share && !embeddableId) { + // TODO: support sharing in by-value mode + share.toggleShareContextMenu({ + anchorElement, + allowEmbed: true, + allowShortUrl: visualizeCapabilities.createShortUrl, + shareableUrl: unhashUrl(window.location.href), + objectId: savedVis?.id, + objectType: 'visualization', + sharingData: { + title: savedVis?.title, }, - }, - ] - : []), + isDirty: hasUnappliedChanges || hasUnsavedChanges, + }); + } + }, + // disable the Share button if no action specified + disableButton: !share || !!embeddableId, + }, ...(visualizeCapabilities.save && !embeddableId ? [ { id: 'save', + iconType: savedVis?.id && originatingApp ? undefined : 'save', label: savedVis?.id && originatingApp ? 
i18n.translate('visualize.topNavMenu.saveVisualizationAsButtonLabel', { @@ -303,56 +310,50 @@ export const getTopNavConfig = ( }, ] : []), - { - id: 'share', - label: i18n.translate('visualize.topNavMenu.shareVisualizationButtonLabel', { - defaultMessage: 'share', - }), - description: i18n.translate('visualize.topNavMenu.shareVisualizationButtonAriaLabel', { - defaultMessage: 'Share Visualization', - }), - testId: 'shareTopNavButton', - run: (anchorElement) => { - if (share && !embeddableId) { - // TODO: support sharing in by-value mode - share.toggleShareContextMenu({ - anchorElement, - allowEmbed: true, - allowShortUrl: visualizeCapabilities.createShortUrl, - shareableUrl: unhashUrl(window.location.href), - objectId: savedVis?.id, - objectType: 'visualization', - sharingData: { - title: savedVis?.title, + ...(originatingApp && ((savedVis && savedVis.id) || embeddableId) + ? [ + { + id: 'saveAndReturn', + label: i18n.translate('visualize.topNavMenu.saveAndReturnVisualizationButtonLabel', { + defaultMessage: 'Save and return', + }), + emphasize: true, + iconType: 'checkInCircleFilled', + description: i18n.translate( + 'visualize.topNavMenu.saveAndReturnVisualizationButtonAriaLabel', + { + defaultMessage: 'Finish editing visualization and return to the last app', + } + ), + testId: 'visualizesaveAndReturnButton', + disableButton: hasUnappliedChanges, + tooltip() { + if (hasUnappliedChanges) { + return i18n.translate( + 'visualize.topNavMenu.saveAndReturnVisualizationDisabledButtonTooltip', + { + defaultMessage: 'Apply or Discard your changes before finishing', + } + ); + } }, - isDirty: hasUnappliedChanges || hasUnsavedChanges, - }); - } - }, - // disable the Share button if no action specified - disableButton: !share || !!embeddableId, - }, - { - id: 'inspector', - label: i18n.translate('visualize.topNavMenu.openInspectorButtonLabel', { - defaultMessage: 'inspect', - }), - description: i18n.translate('visualize.topNavMenu.openInspectorButtonAriaLabel', { - defaultMessage: 'Open Inspector for visualization', - }), - testId: 'openInspectorButton', - disableButton() { - return !embeddableHandler.hasInspector || !embeddableHandler.hasInspector(); - }, - run: openInspector, - tooltip() { - if (!embeddableHandler.hasInspector || !embeddableHandler.hasInspector()) { - return i18n.translate('visualize.topNavMenu.openInspectorDisabledButtonTooltip', { - defaultMessage: `This visualization doesn't support any inspectors.`, - }); - } - }, - }, + run: async () => { + const saveOptions = { + confirmOverwrite: false, + returnToOrigin: true, + }; + if ( + originatingApp === 'dashboards' && + dashboard.dashboardFeatureFlagConfig.allowByValueEmbeddables && + !savedVis + ) { + return createVisReference(); + } + return doSave(saveOptions); + }, + }, + ] + : []), ]; return topNavMenu; diff --git a/src/setup_node_env/babel_register/register.js b/src/setup_node_env/babel_register/register.js index 6d573d8922244..3c0bd387c8e44 100644 --- a/src/setup_node_env/babel_register/register.js +++ b/src/setup_node_env/babel_register/register.js @@ -46,26 +46,12 @@ var ignore = [ // ignore paths matching `/canvas/canvas_plugin/` /[\/\\]canvas[\/\\]canvas_plugin[\/\\]/, -]; -if (global.__BUILT_WITH_BABEL__) { - // when building the Kibana source we replace the statement - // `global.__BUILT_WITH_BABEL__` with the value `true` so that - // when @babel/register is required for the first time by users - // it will exclude kibana's `src` directory. 
- // - // We still need @babel/register for plugins though, we've been - // building their server code at require-time since version 4.2 - // TODO: the plugin install process could transpile plugin server code... - ignore.push(resolve(__dirname, '../../../src')); -} else { - ignore.push( - // ignore any path in the packages, unless it is in the package's - // root `src` directory, in any test or __tests__ directory, or it - // ends with .test.js, .test.ts, or .test.tsx - /[\/\\]packages[\/\\](eslint-|kbn-)[^\/\\]+[\/\\](?!src[\/\\].*|(.+[\/\\])?(test|__tests__)[\/\\].+|.+\.test\.(js|ts|tsx)$)(.+$)/ - ); -} + // ignore any path in the packages, unless it is in the package's + // root `src` directory, in any test or __tests__ directory, or it + // ends with .test.js, .test.ts, or .test.tsx + /[\/\\]packages[\/\\](eslint-|kbn-)[^\/\\]+[\/\\](?!src[\/\\].*|(.+[\/\\])?(test|__tests__)[\/\\].+|.+\.test\.(js|ts|tsx)$)(.+$)/, +]; // modifies all future calls to require() to automatically // compile the required source with babel diff --git a/src/setup_node_env/index.js b/src/setup_node_env/index.js index d84249df7fd8f..60f0982f50d20 100644 --- a/src/setup_node_env/index.js +++ b/src/setup_node_env/index.js @@ -17,5 +17,5 @@ * under the License. */ -require('./prebuilt_dev_only_entry'); +require('./no_transpilation'); require('./babel_register'); diff --git a/src/setup_node_env/prebuilt_dev_only_entry.js b/src/setup_node_env/no_transpilation.js similarity index 100% rename from src/setup_node_env/prebuilt_dev_only_entry.js rename to src/setup_node_env/no_transpilation.js diff --git a/test/common/services/kibana_server/extend_es_archiver.js b/test/common/services/kibana_server/extend_es_archiver.js index 4efdfc4dddf77..f6e14061aed2a 100644 --- a/test/common/services/kibana_server/extend_es_archiver.js +++ b/test/common/services/kibana_server/extend_es_archiver.js @@ -33,9 +33,15 @@ export function extendEsArchiver({ esArchiver, kibanaServer, retry, defaults }) // esArchiver methods return a stats object, with information about the indexes created const stats = await originalMethod.apply(esArchiver, args); + const statsKeys = Object.keys(stats); + const kibanaKeys = statsKeys.filter( + // this also matches stats keys like '.kibana_1' and '.kibana_2,.kibana_1' + (key) => key.includes(KIBANA_INDEX) && (stats[key].created || stats[key].deleted) + ); + // if the kibana index was created by the esArchiver then update the uiSettings // with the defaults to make sure that they are always in place initially - if (stats[KIBANA_INDEX] && (stats[KIBANA_INDEX].created || stats[KIBANA_INDEX].deleted)) { + if (kibanaKeys.length > 0) { await retry.try(async () => { await kibanaServer.uiSettings.update(defaults); }); diff --git a/x-pack/.telemetryrc.json b/x-pack/.telemetryrc.json index db50727c599a9..ae85efcda32d5 100644 --- a/x-pack/.telemetryrc.json +++ b/x-pack/.telemetryrc.json @@ -1,8 +1,5 @@ { "output": "plugins/telemetry_collection_xpack/schema/xpack_plugins.json", "root": "plugins/", - "exclude": [ - "plugins/alerts/server/usage/alerts_usage_collector.ts", - "plugins/apm/server/lib/apm_telemetry/index.ts" - ] + "exclude": [] } diff --git a/x-pack/package.json b/x-pack/package.json index 5742200b55d9f..4145d8d72cc63 100644 --- a/x-pack/package.json +++ b/x-pack/package.json @@ -32,6 +32,7 @@ "@cypress/webpack-preprocessor": "^5.4.1", "@elastic/apm-rum-react": "^1.2.5", "@elastic/maki": "6.3.0", + "@kbn/babel-preset": "1.0.0", "@kbn/dev-utils": "1.0.0", "@kbn/es": "1.0.0", "@kbn/expect": "1.0.0", @@ -280,7 
+281,6 @@ "@elastic/node-crypto": "1.2.1", "@elastic/numeral": "^2.5.0", "@elastic/safer-lodash-set": "0.0.0", - "@kbn/babel-preset": "1.0.0", "@kbn/config-schema": "1.0.0", "@kbn/i18n": "1.0.0", "@kbn/interpreter": "1.0.0", diff --git a/x-pack/plugins/alerts/server/usage/alerts_usage_collector.ts b/x-pack/plugins/alerts/server/usage/alerts_usage_collector.ts index 64d3ad54a2318..de82dd31877af 100644 --- a/x-pack/plugins/alerts/server/usage/alerts_usage_collector.ts +++ b/x-pack/plugins/alerts/server/usage/alerts_usage_collector.ts @@ -4,11 +4,44 @@ * you may not use this file except in compliance with the Elastic License. */ -import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { MakeSchemaFrom, UsageCollectionSetup } from 'src/plugins/usage_collection/server'; import { get } from 'lodash'; import { TaskManagerStartContract } from '../../../task_manager/server'; import { AlertsUsage } from './types'; +const byTypeSchema: MakeSchemaFrom['count_by_type'] = { + // TODO: Find out an automated way to populate the keys or reformat these into an array (and change the Remote Telemetry indexer accordingly) + DYNAMIC_KEY: { type: 'long' }, + // Known alerts (searching the use of the alerts API `registerType`: + // Built-in + '__index-threshold': { type: 'long' }, + // APM + apm__error_rate: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + apm__transaction_error_rate: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + apm__transaction_duration: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + apm__transaction_duration_anomaly: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + // Infra + metrics__alert__threshold: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + metrics__alert__inventory__threshold: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + logs__alert__document__count: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + // Monitoring + monitoring_alert_cluster_health: { type: 'long' }, + monitoring_alert_cpu_usage: { type: 'long' }, + monitoring_alert_disk_usage: { type: 'long' }, + monitoring_alert_elasticsearch_version_mismatch: { type: 'long' }, + monitoring_alert_kibana_version_mismatch: { type: 'long' }, + monitoring_alert_license_expiration: { type: 'long' }, + monitoring_alert_logstash_version_mismatch: { type: 'long' }, + monitoring_alert_nodes_changed: { type: 'long' }, + // Security Solution + siem__signals: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + siem__notifications: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + // Uptime + xpack__uptime__alerts__monitorStatus: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + xpack__uptime__alerts__tls: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention + xpack__uptime__alerts__durationAnomaly: { type: 'long' }, // eslint-disable-line @typescript-eslint/naming-convention +}; + export function createAlertsUsageCollector( usageCollection: UsageCollectionSetup, taskManager: TaskManagerStartContract @@ -50,6 +83,28 @@ export function createAlertsUsageCollector( }; } }, + schema: { + count_total: { type: 'long' }, + count_active_total: { type: 'long' }, + count_disabled_total: { type: 'long' }, + throttle_time: { + min: { type: 'long' }, + avg: { type: 'float' }, + max: { type: 'long' 
}, + }, + schedule_time: { + min: { type: 'long' }, + avg: { type: 'float' }, + max: { type: 'long' }, + }, + connectors_per_alert: { + min: { type: 'long' }, + avg: { type: 'float' }, + max: { type: 'long' }, + }, + count_active_by_type: byTypeSchema, + count_by_type: byTypeSchema, + }, }); } diff --git a/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap b/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap index 663411dff76ff..9f7a911bf21c7 100644 --- a/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap +++ b/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap @@ -11,6 +11,67 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "apm": { "properties": { + "services_per_agent": { + "properties": { + "dotnet": { + "type": "long" + }, + "go": { + "type": "long" + }, + "java": { + "type": "long" + }, + "js-base": { + "type": "long" + }, + "nodejs": { + "type": "long" + }, + "python": { + "type": "long" + }, + "ruby": { + "type": "long" + }, + "rum-js": { + "type": "long" + }, + "otlp": { + "type": "long" + }, + "opentelemetry/cpp": { + "type": "long" + }, + "opentelemetry/dotnet": { + "type": "long" + }, + "opentelemetry/erlang": { + "type": "long" + }, + "opentelemetry/go": { + "type": "long" + }, + "opentelemetry/java": { + "type": "long" + }, + "opentelemetry/nodejs": { + "type": "long" + }, + "opentelemetry/php": { + "type": "long" + }, + "opentelemetry/python": { + "type": "long" + }, + "opentelemetry/ruby": { + "type": "long" + }, + "opentelemetry/webjs": { + "type": "long" + } + } + }, "agents": { "properties": { "dotnet": { @@ -18,8 +79,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -27,49 +87,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -82,8 +133,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -91,49 +141,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - 
"composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -146,8 +187,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -155,49 +195,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -210,8 +241,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -219,49 +249,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -274,8 +295,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -283,49 +303,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - 
"ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -338,8 +349,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -347,49 +357,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -402,8 +403,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -411,49 +411,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -466,8 +457,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -475,49 +465,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 
1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -530,8 +511,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -539,49 +519,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -594,8 +565,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -603,49 +573,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -658,8 +619,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -667,49 +627,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a 
JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -722,8 +673,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -731,49 +681,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -786,8 +727,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -795,49 +735,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -850,8 +781,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - 
"ignore_above": 1024 + "type": "keyword" } } }, @@ -859,49 +789,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -914,8 +835,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -923,49 +843,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -978,8 +889,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -987,49 +897,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -1042,8 +943,7 @@ exports[`APM 
telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -1051,49 +951,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 - } + "type": "keyword" + }, + "composite": { + "type": "keyword" + } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -1106,8 +997,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -1115,49 +1005,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -1170,8 +1051,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "agent": { "properties": { "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, @@ -1179,49 +1059,40 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the "properties": { "framework": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "language": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } }, "runtime": { "properties": { - "composite": { - "type": "keyword", - "ignore_above": 1024 - }, "name": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "version": { - "type": 
"keyword", - "ignore_above": 1024 + "type": "keyword" + }, + "composite": { + "type": "keyword" } } } @@ -1231,6 +1102,39 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, + "has_any_services": { + "type": "boolean" + }, + "version": { + "properties": { + "apm_server": { + "properties": { + "major": { + "type": "long" + }, + "minor": { + "type": "long" + }, + "patch": { + "type": "long" + } + } + } + } + }, + "environments": { + "properties": { + "services_without_environments": { + "type": "long" + }, + "services_with_multiple_environments": { + "type": "long" + }, + "top_environments": { + "type": "keyword" + } + } + }, "aggregated_transactions": { "properties": { "current_implementation": { @@ -1240,9 +1144,6 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the }, "transaction_count": { "type": "long" - }, - "ratio": { - "type": "float" } } }, @@ -1253,67 +1154,77 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the }, "transaction_count": { "type": "long" - }, - "ratio": { - "type": "float" } } }, - "with_country": { + "no_rum": { "properties": { "expected_metric_document_count": { "type": "long" }, "transaction_count": { "type": "long" + } + } + }, + "no_rum_no_observer_name": { + "properties": { + "expected_metric_document_count": { + "type": "long" }, - "ratio": { - "type": "float" + "transaction_count": { + "type": "long" } } - } - } - }, - "environments": { - "properties": { - "services_without_environment": { - "type": "long" }, - "services_with_multiple_environments": { - "type": "long" + "only_rum": { + "properties": { + "expected_metric_document_count": { + "type": "long" + }, + "transaction_count": { + "type": "long" + } + } }, - "top_enviroments": { - "type": "keyword", - "ignore_above": 1024 + "only_rum_no_observer_name": { + "properties": { + "expected_metric_document_count": { + "type": "long" + }, + "transaction_count": { + "type": "long" + } + } } } }, "cloud": { "properties": { "availability_zone": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "provider": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" }, "region": { - "type": "keyword", - "ignore_above": 1024 + "type": "keyword" } } }, "counts": { "properties": { - "agent_configuration": { + "transaction": { "properties": { + "1d": { + "type": "long" + }, "all": { "type": "long" } } }, - "error": { + "span": { "properties": { "1d": { "type": "long" @@ -1323,21 +1234,27 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "max_error_groups_per_service": { + "error": { "properties": { "1d": { "type": "long" + }, + "all": { + "type": "long" } } }, - "max_transaction_groups_per_service": { + "metric": { "properties": { "1d": { "type": "long" + }, + "all": { + "type": "long" } } }, - "metric": { + "sourcemap": { "properties": { "1d": { "type": "long" @@ -1357,14 +1274,14 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "services": { + "agent_configuration": { "properties": { - "1d": { + "all": { "type": "long" } } }, - "sourcemap": { + "max_transaction_groups_per_service": { "properties": { "1d": { "type": "long" @@ -1374,7 +1291,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "span": { + "max_error_groups_per_service": { "properties": { "1d": { "type": "long" @@ -1388,10 +1305,13 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with 
the "properties": { "1d": { "type": "long" + }, + "all": { + "type": "long" } } }, - "transaction": { + "services": { "properties": { "1d": { "type": "long" @@ -1470,55 +1390,22 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "has_any_services": { - "type": "boolean" - }, - "indices": { + "retainment": { "properties": { - "all": { - "properties": { - "total": { - "properties": { - "docs": { - "properties": { - "count": { - "type": "long" - } - } - }, - "store": { - "properties": { - "size_in_bytes": { - "type": "long" - } - } - } - } - } - } - }, - "shards": { + "span": { "properties": { - "total": { + "ms": { "type": "long" } } - } - } - }, - "integrations": { - "properties": { - "ml": { + }, + "transaction": { "properties": { - "all_jobs_count": { + "ms": { "type": "long" } } - } - } - }, - "retainment": { - "properties": { + }, "error": { "properties": { "ms": { @@ -1533,106 +1420,63 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "onboarding": { + "sourcemap": { "properties": { "ms": { "type": "long" } } }, - "span": { + "onboarding": { "properties": { "ms": { "type": "long" } } - }, - "transaction": { + } + } + }, + "integrations": { + "properties": { + "ml": { "properties": { - "ms": { + "all_jobs_count": { "type": "long" } } } } }, - "services_per_agent": { + "indices": { "properties": { - "dotnet": { - "type": "long", - "null_value": 0 - }, - "go": { - "type": "long", - "null_value": 0 - }, - "java": { - "type": "long", - "null_value": 0 - }, - "js-base": { - "type": "long", - "null_value": 0 - }, - "nodejs": { - "type": "long", - "null_value": 0 - }, - "python": { - "type": "long", - "null_value": 0 - }, - "ruby": { - "type": "long", - "null_value": 0 - }, - "rum-js": { - "type": "long", - "null_value": 0 - }, - "otlp": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/cpp": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/dotnet": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/erlang": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/go": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/java": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/nodejs": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/php": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/python": { - "type": "long", - "null_value": 0 - }, - "opentelemetry/ruby": { - "type": "long", - "null_value": 0 + "shards": { + "properties": { + "total": { + "type": "long" + } + } }, - "opentelemetry/webjs": { - "type": "long", - "null_value": 0 + "all": { + "properties": { + "total": { + "properties": { + "docs": { + "properties": { + "count": { + "type": "long" + } + } + }, + "store": { + "properties": { + "size_in_bytes": { + "type": "long" + } + } + } + } + } + } } } }, @@ -1649,7 +1493,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "agent_configuration": { + "cloud": { "properties": { "took": { "properties": { @@ -1660,7 +1504,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "agents": { + "processor_events": { "properties": { "took": { "properties": { @@ -1671,7 +1515,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "cardinality": { + "agent_configuration": { "properties": { "took": { "properties": { @@ -1682,7 +1526,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - 
"cloud": { + "services": { "properties": { "took": { "properties": { @@ -1693,7 +1537,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "environments": { + "versions": { "properties": { "took": { "properties": { @@ -1715,17 +1559,6 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "indices_stats": { - "properties": { - "took": { - "properties": { - "ms": { - "type": "long" - } - } - } - } - }, "integrations": { "properties": { "took": { @@ -1737,7 +1570,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "processor_events": { + "agents": { "properties": { "took": { "properties": { @@ -1748,7 +1581,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "services": { + "indices_stats": { "properties": { "took": { "properties": { @@ -1759,7 +1592,7 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } }, - "versions": { + "cardinality": { "properties": { "took": { "properties": { @@ -1771,23 +1604,6 @@ exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the } } } - }, - "version": { - "properties": { - "apm_server": { - "properties": { - "major": { - "type": "long" - }, - "minor": { - "type": "long" - }, - "patch": { - "type": "long" - } - } - } - } } } } diff --git a/x-pack/plugins/apm/common/apm_telemetry.ts b/x-pack/plugins/apm/common/apm_telemetry.ts index 874cee05553d4..faf5b21d69c7b 100644 --- a/x-pack/plugins/apm/common/apm_telemetry.ts +++ b/x-pack/plugins/apm/common/apm_telemetry.ts @@ -4,260 +4,39 @@ * you may not use this file except in compliance with the Elastic License. */ import { produce } from 'immer'; -import { AGENT_NAMES } from './agent_name'; +// eslint-disable-next-line @kbn/eslint/no-restricted-paths +import { apmSchema } from '../server/lib/apm_telemetry/schema'; + +function schemaToMapping(schemaLeaf: any): any { + // convert "array" definition to mapping + if (schemaLeaf.type === 'array') { + return schemaToMapping(schemaLeaf.items); + } + + if (typeof schemaLeaf.type === 'string') { + return schemaLeaf; + } + + return Object.entries(schemaLeaf).reduce((acc, [key, value]) => { + const propMapping = schemaToMapping(value); + + return { + ...acc, + [key]: + typeof propMapping.type === 'string' + ? propMapping + : { properties: propMapping }, + }; + }, {}); +} /** - * Generate an object containing the mapping used for APM telemetry. Can be used - * with the `upload-telemetry-data` script or to update the mapping in the - * telemetry repository. - * - * This function breaks things up to make the mapping easier to understand. + * Generate an object containing the mapping used for APM telemetry based on the schema specified + * in the usage collector. Can be used with the `upload-telemetry-data` script or to update the + * mapping in the telemetry repository. 
*/ export function getApmTelemetryMapping() { - const keyword = { - type: 'keyword', - ignore_above: 1024, - }; - - const float = { - type: 'float', - }; - - const long = { - type: 'long', - }; - - const allProperties = { - properties: { - all: long, - }, - }; - - const oneDayProperties = { - properties: { - '1d': long, - }, - }; - - const oneDayAllProperties = { - properties: { - '1d': long, - all: long, - }, - }; - - const msProperties = { - properties: { - ms: long, - }, - }; - - const tookProperties = { - properties: { - took: msProperties, - }, - }; - - const compositeNameVersionProperties = { - properties: { - composite: keyword, - name: keyword, - version: keyword, - }, - }; - - const agentProperties = { - properties: { version: keyword }, - }; - - const serviceProperties = { - properties: { - framework: compositeNameVersionProperties, - language: compositeNameVersionProperties, - runtime: compositeNameVersionProperties, - }, - }; - - const aggregatedTransactionsProperties = { - properties: { - expected_metric_document_count: long, - transaction_count: long, - ratio: float, - }, - }; - - return { - properties: { - agents: { - properties: AGENT_NAMES.reduce>( - (previousValue, currentValue) => { - previousValue[currentValue] = { - properties: { - agent: agentProperties, - service: serviceProperties, - }, - }; - - return previousValue; - }, - {} - ), - }, - aggregated_transactions: { - properties: { - current_implementation: aggregatedTransactionsProperties, - no_observer_name: aggregatedTransactionsProperties, - with_country: aggregatedTransactionsProperties, - }, - }, - environments: { - properties: { - services_without_environment: long, - services_with_multiple_environments: long, - top_enviroments: keyword, - }, - }, - cloud: { - properties: { - availability_zone: keyword, - provider: keyword, - region: keyword, - }, - }, - counts: { - properties: { - agent_configuration: allProperties, - error: oneDayAllProperties, - max_error_groups_per_service: oneDayProperties, - max_transaction_groups_per_service: oneDayProperties, - metric: oneDayAllProperties, - onboarding: oneDayAllProperties, - services: oneDayProperties, - sourcemap: oneDayAllProperties, - span: oneDayAllProperties, - traces: oneDayProperties, - transaction: oneDayAllProperties, - }, - }, - cardinality: { - properties: { - client: { - properties: { - geo: { - properties: { - country_iso_code: { properties: { rum: oneDayProperties } }, - }, - }, - }, - }, - user_agent: { - properties: { - original: { - properties: { - all_agents: oneDayProperties, - rum: oneDayProperties, - }, - }, - }, - }, - transaction: { - properties: { - name: { - properties: { - all_agents: oneDayProperties, - rum: oneDayProperties, - }, - }, - }, - }, - }, - }, - has_any_services: { - type: 'boolean', - }, - indices: { - properties: { - all: { - properties: { - total: { - properties: { - docs: { - properties: { - count: long, - }, - }, - store: { - properties: { - size_in_bytes: long, - }, - }, - }, - }, - }, - }, - shards: { - properties: { - total: long, - }, - }, - }, - }, - integrations: { - properties: { - ml: { - properties: { - all_jobs_count: long, - }, - }, - }, - }, - retainment: { - properties: { - error: msProperties, - metric: msProperties, - onboarding: msProperties, - span: msProperties, - transaction: msProperties, - }, - }, - services_per_agent: { - properties: AGENT_NAMES.reduce>( - (previousValue, currentValue) => { - previousValue[currentValue] = { ...long, null_value: 0 }; - return previousValue; - }, - {} - ), - }, - tasks: { 
- properties: { - aggregated_transactions: tookProperties, - agent_configuration: tookProperties, - agents: tookProperties, - cardinality: tookProperties, - cloud: tookProperties, - environments: tookProperties, - groupings: tookProperties, - indices_stats: tookProperties, - integrations: tookProperties, - processor_events: tookProperties, - services: tookProperties, - versions: tookProperties, - }, - }, - version: { - properties: { - apm_server: { - properties: { - major: long, - minor: long, - patch: long, - }, - }, - }, - }, - }, - }; + return { properties: schemaToMapping(apmSchema) }; } /** diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts index f78280aa7428e..c93fdfc15fe3c 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts @@ -6,7 +6,6 @@ import { Observable } from 'rxjs'; import { take } from 'rxjs/operators'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; -import { DeepRequired } from 'utility-types'; import { CoreSetup, Logger, @@ -21,14 +20,14 @@ import { APM_TELEMETRY_SAVED_OBJECT_ID, APM_TELEMETRY_SAVED_OBJECT_TYPE, } from '../../../common/apm_saved_object_constants'; -import { getApmTelemetryMapping } from '../../../common/apm_telemetry'; import { getInternalSavedObjectsClient } from '../helpers/get_internal_saved_objects_client'; import { getApmIndices } from '../settings/apm_indices/get_apm_indices'; import { collectDataTelemetry, CollectTelemetryParams, } from './collect_data_telemetry'; -import { APMDataTelemetry } from './types'; +import { APMUsage } from './types'; +import { apmSchema } from './schema'; const APM_TELEMETRY_TASK_NAME = 'apm-telemetry-task'; @@ -107,9 +106,9 @@ export async function createApmTelemetry({ ); }; - const collector = usageCollector.makeUsageCollector({ + const collector = usageCollector.makeUsageCollector({ type: 'apm', - schema: getApmTelemetryMapping(), + schema: apmSchema, fetch: async () => { try { const { kibanaVersion: storedKibanaVersion, ...data } = ( @@ -117,9 +116,7 @@ export async function createApmTelemetry({ APM_TELEMETRY_SAVED_OBJECT_TYPE, APM_TELEMETRY_SAVED_OBJECT_ID ) - ).attributes as { kibanaVersion: string } & DeepRequired< - APMDataTelemetry - >; + ).attributes as { kibanaVersion: string } & APMUsage; return data; } catch (err) { diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/schema.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/schema.ts new file mode 100644 index 0000000000000..4bbda9add0fdb --- /dev/null +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/schema.ts @@ -0,0 +1,206 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { MakeSchemaFrom } from 'src/plugins/usage_collection/server'; +import { + AggregatedTransactionsCounts, + APMUsage, + TimeframeMap, + TimeframeMap1d, + TimeframeMapAll, +} from './types'; +import { AgentName } from '../../../typings/es_schemas/ui/fields/agent'; + +const long: { type: 'long' } = { type: 'long' }; + +const aggregatedTransactionCountSchema: MakeSchemaFrom = { + expected_metric_document_count: long, + transaction_count: long, +}; + +const timeframeMap1dSchema: MakeSchemaFrom = { + '1d': long, +}; + +const timeframeMapAllSchema: MakeSchemaFrom = { + all: long, +}; + +const timeframeMapSchema: MakeSchemaFrom = { + ...timeframeMap1dSchema, + ...timeframeMapAllSchema, +}; + +const agentSchema: MakeSchemaFrom['agents'][AgentName] = { + agent: { + version: { type: 'array', items: { type: 'keyword' } }, + }, + service: { + framework: { + name: { type: 'array', items: { type: 'keyword' } }, + version: { type: 'array', items: { type: 'keyword' } }, + composite: { type: 'array', items: { type: 'keyword' } }, + }, + language: { + name: { type: 'array', items: { type: 'keyword' } }, + version: { type: 'array', items: { type: 'keyword' } }, + composite: { type: 'array', items: { type: 'keyword' } }, + }, + runtime: { + name: { type: 'array', items: { type: 'keyword' } }, + version: { type: 'array', items: { type: 'keyword' } }, + composite: { type: 'array', items: { type: 'keyword' } }, + }, + }, +}; + +const apmPerAgentSchema: Pick< + MakeSchemaFrom, + 'services_per_agent' | 'agents' +> = { + // services_per_agent: AGENT_NAMES.reduce( + // (acc, name) => ({ ...acc, [name]: long }), + // {} as Record + // ), + // agents: AGENT_NAMES.reduce( + // (acc, name) => ({ ...acc, [name]: agentSchema }), + // {} as Record + // ), + // TODO: Find a way for `@kbn/telemetry-tools` to understand and evaluate expressions. 
+ // In the meanwhile, we'll have to maintain these lists up to date (TS will remind us to update) + services_per_agent: { + dotnet: long, + go: long, + java: long, + 'js-base': long, + nodejs: long, + python: long, + ruby: long, + 'rum-js': long, + otlp: long, + 'opentelemetry/cpp': long, + 'opentelemetry/dotnet': long, + 'opentelemetry/erlang': long, + 'opentelemetry/go': long, + 'opentelemetry/java': long, + 'opentelemetry/nodejs': long, + 'opentelemetry/php': long, + 'opentelemetry/python': long, + 'opentelemetry/ruby': long, + 'opentelemetry/webjs': long, + }, + agents: { + dotnet: agentSchema, + go: agentSchema, + java: agentSchema, + 'js-base': agentSchema, + nodejs: agentSchema, + python: agentSchema, + ruby: agentSchema, + 'rum-js': agentSchema, + otlp: agentSchema, + 'opentelemetry/cpp': agentSchema, + 'opentelemetry/dotnet': agentSchema, + 'opentelemetry/erlang': agentSchema, + 'opentelemetry/go': agentSchema, + 'opentelemetry/java': agentSchema, + 'opentelemetry/nodejs': agentSchema, + 'opentelemetry/php': agentSchema, + 'opentelemetry/python': agentSchema, + 'opentelemetry/ruby': agentSchema, + 'opentelemetry/webjs': agentSchema, + }, +}; + +export const apmSchema: MakeSchemaFrom = { + ...apmPerAgentSchema, + has_any_services: { type: 'boolean' }, + version: { + apm_server: { + major: long, + minor: long, + patch: long, + }, + }, + environments: { + services_without_environments: long, + services_with_multiple_environments: long, + top_environments: { type: 'array', items: { type: 'keyword' } }, + }, + aggregated_transactions: { + current_implementation: aggregatedTransactionCountSchema, + no_observer_name: aggregatedTransactionCountSchema, + no_rum: aggregatedTransactionCountSchema, + no_rum_no_observer_name: aggregatedTransactionCountSchema, + only_rum: aggregatedTransactionCountSchema, + only_rum_no_observer_name: aggregatedTransactionCountSchema, + }, + cloud: { + availability_zone: { type: 'array', items: { type: 'keyword' } }, + provider: { type: 'array', items: { type: 'keyword' } }, + region: { type: 'array', items: { type: 'keyword' } }, + }, + counts: { + transaction: timeframeMapSchema, + span: timeframeMapSchema, + error: timeframeMapSchema, + metric: timeframeMapSchema, + sourcemap: timeframeMapSchema, + onboarding: timeframeMapSchema, + agent_configuration: timeframeMapAllSchema, + max_transaction_groups_per_service: timeframeMapSchema, + max_error_groups_per_service: timeframeMapSchema, + traces: timeframeMapSchema, + services: timeframeMapSchema, + }, + cardinality: { + client: { geo: { country_iso_code: { rum: timeframeMap1dSchema } } }, + user_agent: { + original: { + all_agents: timeframeMap1dSchema, + rum: timeframeMap1dSchema, + }, + }, + transaction: { + name: { + all_agents: timeframeMap1dSchema, + rum: timeframeMap1dSchema, + }, + }, + }, + retainment: { + span: { ms: long }, + transaction: { ms: long }, + error: { ms: long }, + metric: { ms: long }, + sourcemap: { ms: long }, + onboarding: { ms: long }, + }, + integrations: { ml: { all_jobs_count: long } }, + + indices: { + shards: { total: long }, + all: { + total: { + docs: { count: long }, + store: { size_in_bytes: long }, + }, + }, + }, + tasks: { + aggregated_transactions: { took: { ms: long } }, + cloud: { took: { ms: long } }, + processor_events: { took: { ms: long } }, + agent_configuration: { took: { ms: long } }, + services: { took: { ms: long } }, + versions: { took: { ms: long } }, + groupings: { took: { ms: long } }, + integrations: { took: { ms: long } }, + agents: { took: { ms: long } }, 
+ indices_stats: { took: { ms: long } }, + cardinality: { took: { ms: long } }, + }, +}; diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts index c7af292e817c7..7ed79752b43c4 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts @@ -20,7 +20,7 @@ export interface AggregatedTransactionsCounts { transaction_count: number; } -export type APMDataTelemetry = DeepPartial<{ +export interface APMUsage { has_any_services: boolean; services_per_agent: Record; version: { @@ -139,6 +139,8 @@ export type APMDataTelemetry = DeepPartial<{ | 'cardinality', { took: { ms: number } } >; -}>; +} + +export type APMDataTelemetry = DeepPartial; export type APMTelemetry = APMDataTelemetry; diff --git a/x-pack/plugins/encrypted_saved_objects/server/config.test.ts b/x-pack/plugins/encrypted_saved_objects/server/config.test.ts index 3f8074eb15c0c..cbe987830717f 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/config.test.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/config.test.ts @@ -6,9 +6,8 @@ jest.mock('crypto', () => ({ randomBytes: jest.fn() })); -import { first } from 'rxjs/operators'; -import { loggingSystemMock, coreMock } from 'src/core/server/mocks'; -import { createConfig$, ConfigSchema } from './config'; +import { loggingSystemMock } from 'src/core/server/mocks'; +import { createConfig, ConfigSchema } from './config'; describe('config schema', () => { it('generates proper defaults', () => { @@ -16,6 +15,9 @@ describe('config schema', () => { Object { "enabled": true, "encryptionKey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "keyRotation": Object { + "decryptionOnlyKeys": Array [], + }, } `); @@ -23,12 +25,41 @@ describe('config schema', () => { Object { "enabled": true, "encryptionKey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "keyRotation": Object { + "decryptionOnlyKeys": Array [], + }, } `); expect(ConfigSchema.validate({}, { dist: true })).toMatchInlineSnapshot(` Object { "enabled": true, + "keyRotation": Object { + "decryptionOnlyKeys": Array [], + }, + } + `); + }); + + it('properly validates config', () => { + expect( + ConfigSchema.validate( + { + encryptionKey: 'a'.repeat(32), + keyRotation: { decryptionOnlyKeys: ['b'.repeat(32), 'c'.repeat(32)] }, + }, + { dist: true } + ) + ).toMatchInlineSnapshot(` + Object { + "enabled": true, + "encryptionKey": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "keyRotation": Object { + "decryptionOnlyKeys": Array [ + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "cccccccccccccccccccccccccccccccc", + ], + }, } `); }); @@ -46,21 +77,65 @@ describe('config schema', () => { `"[encryptionKey]: value has length [3] but it must have a minimum length of [32]."` ); }); + + it('should throw error if any of the xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys is less than 32 characters', () => { + expect(() => + ConfigSchema.validate({ + keyRotation: { decryptionOnlyKeys: ['a'.repeat(32), 'b'.repeat(31)] }, + }) + ).toThrowErrorMatchingInlineSnapshot( + `"[keyRotation.decryptionOnlyKeys.1]: value has length [31] but it must have a minimum length of [32]."` + ); + + expect(() => + ConfigSchema.validate( + { keyRotation: { decryptionOnlyKeys: ['a'.repeat(32), 'b'.repeat(31)] } }, + { dist: true } + ) + ).toThrowErrorMatchingInlineSnapshot( + `"[keyRotation.decryptionOnlyKeys.1]: value has length [31] but it must have a minimum length of [32]."` + ); + }); + + it('should throw error if any of the 
xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys is equal to xpack.encryptedSavedObjects.encryptionKey', () => { + expect(() => + ConfigSchema.validate({ + encryptionKey: 'a'.repeat(32), + keyRotation: { decryptionOnlyKeys: ['a'.repeat(32)] }, + }) + ).toThrowErrorMatchingInlineSnapshot( + `"\`keyRotation.decryptionOnlyKeys\` cannot contain primary encryption key specified in \`encryptionKey\`."` + ); + + expect(() => + ConfigSchema.validate( + { + encryptionKey: 'a'.repeat(32), + keyRotation: { decryptionOnlyKeys: ['a'.repeat(32)] }, + }, + { dist: true } + ) + ).toThrowErrorMatchingInlineSnapshot( + `"\`keyRotation.decryptionOnlyKeys\` cannot contain primary encryption key specified in \`encryptionKey\`."` + ); + }); }); -describe('createConfig$()', () => { - it('should log a warning, set xpack.encryptedSavedObjects.encryptionKey and usingEphemeralEncryptionKey=true when encryptionKey is not set', async () => { +describe('createConfig()', () => { + it('should log a warning, set xpack.encryptedSavedObjects.encryptionKey and usingEphemeralEncryptionKey=true when encryptionKey is not set', () => { const mockRandomBytes = jest.requireMock('crypto').randomBytes; mockRandomBytes.mockReturnValue('ab'.repeat(16)); - const contextMock = coreMock.createPluginInitializerContext({}); - const config = await createConfig$(contextMock).pipe(first()).toPromise(); + const logger = loggingSystemMock.create().get(); + const config = createConfig(ConfigSchema.validate({}, { dist: true }), logger); expect(config).toEqual({ - config: { encryptionKey: 'ab'.repeat(16) }, + enabled: true, + encryptionKey: 'ab'.repeat(16), + keyRotation: { decryptionOnlyKeys: [] }, usingEphemeralEncryptionKey: true, }); - expect(loggingSystemMock.collect(contextMock.logger).warn).toMatchInlineSnapshot(` + expect(loggingSystemMock.collect(logger).warn).toMatchInlineSnapshot(` Array [ Array [ "Generating a random key for xpack.encryptedSavedObjects.encryptionKey. 
To be able to decrypt encrypted saved objects attributes after restart, please set xpack.encryptedSavedObjects.encryptionKey in kibana.yml", @@ -70,15 +145,18 @@ describe('createConfig$()', () => { }); it('should not log a warning and set usingEphemeralEncryptionKey=false when encryptionKey is set', async () => { - const contextMock = coreMock.createPluginInitializerContext({ - encryptionKey: 'supersecret', - }); - const config = await createConfig$(contextMock).pipe(first()).toPromise(); + const logger = loggingSystemMock.create().get(); + const config = createConfig( + ConfigSchema.validate({ encryptionKey: 'supersecret'.repeat(3) }, { dist: true }), + logger + ); expect(config).toEqual({ - config: { encryptionKey: 'supersecret' }, + enabled: true, + encryptionKey: 'supersecret'.repeat(3), + keyRotation: { decryptionOnlyKeys: [] }, usingEphemeralEncryptionKey: false, }); - expect(loggingSystemMock.collect(contextMock.logger).warn).toEqual([]); + expect(loggingSystemMock.collect(logger).warn).toEqual([]); }); }); diff --git a/x-pack/plugins/encrypted_saved_objects/server/config.ts b/x-pack/plugins/encrypted_saved_objects/server/config.ts index 9c751a9c67f52..f06c6fa1823ba 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/config.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/config.ts @@ -5,41 +5,50 @@ */ import crypto from 'crypto'; -import { map } from 'rxjs/operators'; import { schema, TypeOf } from '@kbn/config-schema'; -import { PluginInitializerContext } from 'src/core/server'; +import { Logger } from 'src/core/server'; -export const ConfigSchema = schema.object({ - enabled: schema.boolean({ defaultValue: true }), - encryptionKey: schema.conditional( - schema.contextRef('dist'), - true, - schema.maybe(schema.string({ minLength: 32 })), - schema.string({ minLength: 32, defaultValue: 'a'.repeat(32) }) - ), -}); +export type ConfigType = ReturnType; -export function createConfig$(context: PluginInitializerContext) { - return context.config.create>().pipe( - map((config) => { - const logger = context.logger.get('config'); +export const ConfigSchema = schema.object( + { + enabled: schema.boolean({ defaultValue: true }), + encryptionKey: schema.conditional( + schema.contextRef('dist'), + true, + schema.maybe(schema.string({ minLength: 32 })), + schema.string({ minLength: 32, defaultValue: 'a'.repeat(32) }) + ), + keyRotation: schema.object({ + decryptionOnlyKeys: schema.arrayOf(schema.string({ minLength: 32 }), { defaultValue: [] }), + }), + }, + { + validate(value) { + const decryptionOnlyKeys = value.keyRotation?.decryptionOnlyKeys ?? []; + if (value.encryptionKey && decryptionOnlyKeys.includes(value.encryptionKey)) { + return '`keyRotation.decryptionOnlyKeys` cannot contain primary encryption key specified in `encryptionKey`.'; + } + }, + } +); - let encryptionKey = config.encryptionKey; - const usingEphemeralEncryptionKey = encryptionKey === undefined; - if (encryptionKey === undefined) { - logger.warn( - 'Generating a random key for xpack.encryptedSavedObjects.encryptionKey. ' + - 'To be able to decrypt encrypted saved objects attributes after restart, ' + - 'please set xpack.encryptedSavedObjects.encryptionKey in kibana.yml' - ); +export function createConfig(config: TypeOf, logger: Logger) { + let encryptionKey = config.encryptionKey; + const usingEphemeralEncryptionKey = encryptionKey === undefined; + if (encryptionKey === undefined) { + logger.warn( + 'Generating a random key for xpack.encryptedSavedObjects.encryptionKey. 
' + + 'To be able to decrypt encrypted saved objects attributes after restart, ' + + 'please set xpack.encryptedSavedObjects.encryptionKey in kibana.yml' + ); - encryptionKey = crypto.randomBytes(16).toString('hex'); - } + encryptionKey = crypto.randomBytes(16).toString('hex'); + } - return { - config: { ...config, encryptionKey }, - usingEphemeralEncryptionKey, - }; - }) - ); + return { + ...config, + encryptionKey, + usingEphemeralEncryptionKey, + }; } diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.test.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.test.ts index 42d2e2ffd1516..88d57072697fe 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.test.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.test.ts @@ -14,40 +14,44 @@ import { EncryptionError } from './encryption_error'; import { loggingSystemMock } from 'src/core/server/mocks'; import { encryptedSavedObjectsAuditLoggerMock } from '../audit/index.mock'; -const crypto = nodeCrypto({ encryptionKey: 'encryption-key-abc' }); +function createNodeCryptMock(encryptionKey: string) { + const crypto = nodeCrypto({ encryptionKey }); + const nodeCryptoMock: jest.Mocked = { + encrypt: jest.fn(), + decrypt: jest.fn(), + encryptSync: jest.fn(), + decryptSync: jest.fn(), + }; -const mockNodeCrypto: jest.Mocked = { - encrypt: jest.fn(), - decrypt: jest.fn(), - encryptSync: jest.fn(), - decryptSync: jest.fn(), -}; - -let service: EncryptedSavedObjectsService; -let mockAuditLogger: jest.Mocked; - -beforeEach(() => { // Call actual `@elastic/node-crypto` by default, but allow to override implementation in tests. - mockNodeCrypto.encrypt.mockImplementation(async (input: any, aad?: string) => + nodeCryptoMock.encrypt.mockImplementation(async (input: any, aad?: string) => crypto.encrypt(input, aad) ); - mockNodeCrypto.decrypt.mockImplementation( + nodeCryptoMock.decrypt.mockImplementation( async (encryptedOutput: string | Buffer, aad?: string) => crypto.decrypt(encryptedOutput, aad) ); - mockNodeCrypto.encryptSync.mockImplementation((input: any, aad?: string) => + nodeCryptoMock.encryptSync.mockImplementation((input: any, aad?: string) => crypto.encryptSync(input, aad) ); - mockNodeCrypto.decryptSync.mockImplementation((encryptedOutput: string | Buffer, aad?: string) => + nodeCryptoMock.decryptSync.mockImplementation((encryptedOutput: string | Buffer, aad?: string) => crypto.decryptSync(encryptedOutput, aad) ); + return nodeCryptoMock; +} + +let mockNodeCrypto: jest.Mocked; +let service: EncryptedSavedObjectsService; +let mockAuditLogger: jest.Mocked; +beforeEach(() => { + mockNodeCrypto = createNodeCryptMock('encryption-key-abc'); mockAuditLogger = encryptedSavedObjectsAuditLoggerMock.create(); - service = new EncryptedSavedObjectsService( - mockNodeCrypto, - loggingSystemMock.create().get(), - mockAuditLogger - ); + service = new EncryptedSavedObjectsService({ + primaryCrypto: mockNodeCrypto, + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); }); afterEach(() => jest.resetAllMocks()); @@ -229,11 +233,11 @@ describe('#encryptAttributes', () => { async (valueToEncrypt, aad) => `|${valueToEncrypt}|${aad}|` ); - service = new EncryptedSavedObjectsService( - mockNodeCrypto, - loggingSystemMock.create().get(), - mockAuditLogger - ); + service = new EncryptedSavedObjectsService({ + primaryCrypto: mockNodeCrypto, + logger: 
loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); }); it('does not encrypt attributes for unknown types', async () => { @@ -304,6 +308,34 @@ describe('#encryptAttributes', () => { ); }); + it('encrypts only using primary crypto', async () => { + const attributes = { attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }; + + const decryptionOnlyCrypto = createNodeCryptMock('some-key'); + service = new EncryptedSavedObjectsService({ + primaryCrypto: mockNodeCrypto, + decryptionOnlyCryptos: [decryptionOnlyCrypto], + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); + service.registerType({ + type: 'known-type-1', + attributesToEncrypt: new Set(['attrOne', 'attrThree', 'attrFour']), + }); + + await expect( + service.encryptAttributes({ type: 'known-type-1', id: 'object-id' }, attributes) + ).resolves.toEqual({ + attrOne: '|one|["known-type-1","object-id",{"attrTwo":"two"}]|', + attrTwo: 'two', + attrThree: '|three|["known-type-1","object-id",{"attrTwo":"two"}]|', + attrFour: null, + }); + + expect(decryptionOnlyCrypto.encrypt).not.toHaveBeenCalled(); + expect(decryptionOnlyCrypto.encryptSync).not.toHaveBeenCalled(); + }); + it('encrypts only attributes that are supposed to be encrypted even if not all provided', async () => { const attributes = { attrTwo: 'two', attrThree: 'three' }; @@ -923,11 +955,11 @@ describe('#decryptAttributes', () => { }); it('fails if encrypted with another encryption key', async () => { - service = new EncryptedSavedObjectsService( - nodeCrypto({ encryptionKey: 'encryption-key-abc*' }), - loggingSystemMock.create().get(), - mockAuditLogger - ); + service = new EncryptedSavedObjectsService({ + primaryCrypto: nodeCrypto({ encryptionKey: 'encryption-key-abc*' }), + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); service.registerType({ type: 'known-type-1', @@ -949,6 +981,123 @@ describe('#decryptAttributes', () => { ); }); }); + + describe('with decryption only keys', () => { + function getService(primaryCrypto: Crypto, decryptionOnlyCryptos?: Readonly) { + const esoService = new EncryptedSavedObjectsService({ + primaryCrypto, + decryptionOnlyCryptos, + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); + + esoService.registerType({ + type: 'known-type-1', + attributesToEncrypt: new Set(['attrOne', 'attrThree', 'attrFour']), + }); + + return esoService; + } + + const attributes = { attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }; + + let decryptionOnlyCryptoOne: jest.Mocked; + let decryptionOnlyCryptoTwo: jest.Mocked; + beforeEach(() => { + decryptionOnlyCryptoOne = createNodeCryptMock('old-key-one'); + decryptionOnlyCryptoTwo = createNodeCryptMock('old-key-two'); + + service = getService(mockNodeCrypto, [decryptionOnlyCryptoOne, decryptionOnlyCryptoTwo]); + }); + + it('does not use decryption only keys if we can decrypt using primary key', async () => { + const encryptedAttributes = await service.encryptAttributes( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + await expect( + service.decryptAttributes({ type: 'known-type-1', id: 'object-id' }, encryptedAttributes) + ).resolves.toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + 
expect(decryptionOnlyCryptoOne.decrypt).not.toHaveBeenCalled(); + expect(decryptionOnlyCryptoTwo.decrypt).not.toHaveBeenCalled(); + }); + + it('uses decryption only keys if cannot decrypt using primary key', async () => { + const encryptedAttributes = await getService(decryptionOnlyCryptoOne).encryptAttributes( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + await expect( + service.decryptAttributes({ type: 'known-type-1', id: 'object-id' }, encryptedAttributes) + ).resolves.toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + // One call per attributes, we have 2 of them. + expect(mockNodeCrypto.decrypt).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoOne.decrypt).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoTwo.decrypt).not.toHaveBeenCalled(); + }); + + it('uses all available decryption only keys if needed', async () => { + const encryptedAttributes = await getService(decryptionOnlyCryptoTwo).encryptAttributes( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + await expect( + service.decryptAttributes({ type: 'known-type-1', id: 'object-id' }, encryptedAttributes) + ).resolves.toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + // One call per attributes, we have 2 of them. + expect(mockNodeCrypto.decrypt).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoOne.decrypt).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoTwo.decrypt).toHaveBeenCalledTimes(2); + }); + + it('does not use primary encryption key if `omitPrimaryEncryptionKey` is specified', async () => { + const encryptedAttributes = await getService(decryptionOnlyCryptoOne).encryptAttributes( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + await expect( + service.decryptAttributes({ type: 'known-type-1', id: 'object-id' }, encryptedAttributes, { + omitPrimaryEncryptionKey: true, + }) + ).resolves.toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + // One call per attributes, we have 2 of them. 
+ expect(mockNodeCrypto.decrypt).not.toHaveBeenCalled(); + expect(decryptionOnlyCryptoOne.decrypt).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoTwo.decrypt).not.toHaveBeenCalled(); + }); + }); }); describe('#encryptAttributesSync', () => { @@ -957,11 +1106,11 @@ describe('#encryptAttributesSync', () => { (valueToEncrypt, aad) => `|${valueToEncrypt}|${aad}|` ); - service = new EncryptedSavedObjectsService( - mockNodeCrypto, - loggingSystemMock.create().get(), - mockAuditLogger - ); + service = new EncryptedSavedObjectsService({ + primaryCrypto: mockNodeCrypto, + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); }); it('does not encrypt attributes that are not supposed to be encrypted', () => { @@ -996,6 +1145,34 @@ describe('#encryptAttributesSync', () => { }); }); + it('encrypts only using primary crypto', async () => { + const attributes = { attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }; + + const decryptionOnlyCrypto = createNodeCryptMock('some-key'); + service = new EncryptedSavedObjectsService({ + primaryCrypto: mockNodeCrypto, + decryptionOnlyCryptos: [decryptionOnlyCrypto], + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); + service.registerType({ + type: 'known-type-1', + attributesToEncrypt: new Set(['attrOne', 'attrThree', 'attrFour']), + }); + + expect( + service.encryptAttributesSync({ type: 'known-type-1', id: 'object-id' }, attributes) + ).toEqual({ + attrOne: '|one|["known-type-1","object-id",{"attrTwo":"two"}]|', + attrTwo: 'two', + attrThree: '|three|["known-type-1","object-id",{"attrTwo":"two"}]|', + attrFour: null, + }); + + expect(decryptionOnlyCrypto.encrypt).not.toHaveBeenCalled(); + expect(decryptionOnlyCrypto.encryptSync).not.toHaveBeenCalled(); + }); + it('encrypts only attributes that are supposed to be encrypted even if not all provided', () => { const attributes = { attrTwo: 'two', attrThree: 'three' }; @@ -1459,11 +1636,11 @@ describe('#decryptAttributesSync', () => { }); it('fails if encrypted with another encryption key', () => { - service = new EncryptedSavedObjectsService( - nodeCrypto({ encryptionKey: 'encryption-key-abc*' }), - loggingSystemMock.create().get(), - mockAuditLogger - ); + service = new EncryptedSavedObjectsService({ + primaryCrypto: nodeCrypto({ encryptionKey: 'encryption-key-abc*' }), + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); service.registerType({ type: 'known-type-1', @@ -1478,4 +1655,132 @@ describe('#decryptAttributesSync', () => { ).toThrowError(EncryptionError); }); }); + + describe('with decryption only keys', () => { + function getService(primaryCrypto: Crypto, decryptionOnlyCryptos?: Readonly) { + const esoService = new EncryptedSavedObjectsService({ + primaryCrypto, + decryptionOnlyCryptos, + logger: loggingSystemMock.create().get(), + audit: mockAuditLogger, + }); + + esoService.registerType({ + type: 'known-type-1', + attributesToEncrypt: new Set(['attrOne', 'attrThree', 'attrFour']), + }); + + return esoService; + } + + const attributes = { attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }; + + let decryptionOnlyCryptoOne: jest.Mocked; + let decryptionOnlyCryptoTwo: jest.Mocked; + beforeEach(() => { + decryptionOnlyCryptoOne = createNodeCryptMock('old-key-one'); + decryptionOnlyCryptoTwo = createNodeCryptMock('old-key-two'); + + service = getService(mockNodeCrypto, [decryptionOnlyCryptoOne, decryptionOnlyCryptoTwo]); + }); + + it('does not use decryption only keys if we can decrypt using primary 
key', () => { + const encryptedAttributes = service.encryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + expect( + service.decryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + encryptedAttributes + ) + ).toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + expect(decryptionOnlyCryptoOne.decryptSync).not.toHaveBeenCalled(); + expect(decryptionOnlyCryptoTwo.decryptSync).not.toHaveBeenCalled(); + }); + + it('uses decryption only keys if cannot decrypt using primary key', () => { + const encryptedAttributes = getService(decryptionOnlyCryptoOne).encryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + expect( + service.decryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + encryptedAttributes + ) + ).toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + // One call per attributes, we have 2 of them. + expect(mockNodeCrypto.decryptSync).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoOne.decryptSync).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoTwo.decryptSync).not.toHaveBeenCalled(); + }); + + it('uses all available decryption only keys if needed', () => { + const encryptedAttributes = getService(decryptionOnlyCryptoTwo).encryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + expect( + service.decryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + encryptedAttributes + ) + ).toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + // One call per attributes, we have 2 of them. + expect(mockNodeCrypto.decryptSync).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoOne.decryptSync).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoTwo.decryptSync).toHaveBeenCalledTimes(2); + }); + + it('does not use primary encryption key if `omitPrimaryEncryptionKey` is specified', () => { + const encryptedAttributes = getService(decryptionOnlyCryptoOne).encryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + attributes + ); + + expect( + service.decryptAttributesSync( + { type: 'known-type-1', id: 'object-id' }, + encryptedAttributes, + { omitPrimaryEncryptionKey: true } + ) + ).toEqual({ attrOne: 'one', attrTwo: 'two', attrThree: 'three', attrFour: null }); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledTimes(1); + expect(mockAuditLogger.decryptAttributesSuccess).toHaveBeenCalledWith( + ['attrOne', 'attrThree'], + { type: 'known-type-1', id: 'object-id' }, + undefined + ); + + // One call per attributes, we have 2 of them. 
+ expect(mockNodeCrypto.decryptSync).not.toHaveBeenCalled(); + expect(decryptionOnlyCryptoOne.decryptSync).toHaveBeenCalledTimes(2); + expect(decryptionOnlyCryptoTwo.decryptSync).not.toHaveBeenCalled(); + }); + }); }); diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.ts index 82d6bb9be15f6..1f1093a179538 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/encrypted_saved_objects_service.ts @@ -52,6 +52,38 @@ interface CommonParameters { user?: AuthenticatedUser; } +/** + * Describes parameters for the decrypt methods. + */ +interface DecryptParameters extends CommonParameters { + /** + * Indicates whether decryption should only be performed using secondary decryption-only keys. + */ + omitPrimaryEncryptionKey?: boolean; +} + +interface EncryptedSavedObjectsServiceOptions { + /** + * Service logger instance. + */ + logger: Logger; + + /** + * Audit logger instance. + */ + audit: EncryptedSavedObjectsAuditLogger; + + /** + * NodeCrypto instance used for both encryption and decryption. + */ + primaryCrypto: Crypto; + + /** + * NodeCrypto instances used ONLY for decryption (i.e. rotated encryption keys). + */ + decryptionOnlyCryptos?: Readonly; +} + /** * Utility function that gives array representation of the saved object descriptor respecting * optional `namespace` property. @@ -79,16 +111,7 @@ export class EncryptedSavedObjectsService { EncryptedSavedObjectAttributesDefinition > = new Map(); - /** - * @param crypto nodeCrypto instance. - * @param logger Ordinary logger instance. - * @param audit Audit logger instance. - */ - constructor( - private readonly crypto: Readonly, - private readonly logger: Logger, - private readonly audit: EncryptedSavedObjectsAuditLogger - ) {} + constructor(private readonly options: EncryptedSavedObjectsServiceOptions) {} /** * Registers saved object type as the one that contains attributes that should be encrypted. @@ -136,7 +159,7 @@ export class EncryptedSavedObjectsService { descriptor: SavedObjectDescriptor, attributes: T, originalAttributes?: T, - params?: CommonParameters + params?: DecryptParameters ) { const typeDefinition = this.typeDefinitions.get(descriptor.type); if (typeDefinition === undefined) { @@ -174,7 +197,7 @@ export class EncryptedSavedObjectsService { Object.fromEntries( Object.entries(attributes).filter(([key]) => !typeDefinition.shouldBeStripped(key)) ) as T, - { user: params?.user } + params ); } catch (err) { decryptionError = err; @@ -210,10 +233,10 @@ export class EncryptedSavedObjectsService { try { encryptedAttributes[attributeName] = (yield [attributeValue, encryptionAAD])!; } catch (err) { - this.logger.error( + this.options.logger.error( `Failed to encrypt "${attributeName}" attribute: ${err.message || err}` ); - this.audit.encryptAttributeFailure(attributeName, descriptor, params?.user); + this.options.audit.encryptAttributeFailure(attributeName, descriptor, params?.user); throw new EncryptionError( `Unable to encrypt attribute "${attributeName}"`, @@ -229,7 +252,7 @@ export class EncryptedSavedObjectsService { // not the case we should collect and log them to make troubleshooting easier. 
const encryptedAttributesKeys = Object.keys(encryptedAttributes); if (encryptedAttributesKeys.length !== typeDefinition.attributesToEncrypt.size) { - this.logger.debug( + this.options.logger.debug( `The following attributes of saved object "${descriptorToArray( descriptor )}" should have been encrypted: ${Array.from( @@ -242,7 +265,7 @@ export class EncryptedSavedObjectsService { return attributes; } - this.audit.encryptAttributesSuccess(encryptedAttributesKeys, descriptor, params?.user); + this.options.audit.encryptAttributesSuccess(encryptedAttributesKeys, descriptor, params?.user); return { ...attributes, @@ -270,7 +293,9 @@ export class EncryptedSavedObjectsService { while (!iteratorResult.done) { const [attributeValue, encryptionAAD] = iteratorResult.value; try { - iteratorResult = iterator.next(await this.crypto.encrypt(attributeValue, encryptionAAD)); + iteratorResult = iterator.next( + await this.options.primaryCrypto.encrypt(attributeValue, encryptionAAD) + ); } catch (err) { iterator.throw!(err); } @@ -299,7 +324,9 @@ export class EncryptedSavedObjectsService { while (!iteratorResult.done) { const [attributeValue, encryptionAAD] = iteratorResult.value; try { - iteratorResult = iterator.next(this.crypto.encryptSync(attributeValue, encryptionAAD)); + iteratorResult = iterator.next( + this.options.primaryCrypto.encryptSync(attributeValue, encryptionAAD) + ); } catch (err) { iterator.throw!(err); } @@ -321,19 +348,31 @@ export class EncryptedSavedObjectsService { public async decryptAttributes>( descriptor: SavedObjectDescriptor, attributes: T, - params?: CommonParameters + params?: DecryptParameters ): Promise { + const decrypters = this.getDecrypters(params?.omitPrimaryEncryptionKey); const iterator = this.attributesToDecryptIterator(descriptor, attributes, params); let iteratorResult = iterator.next(); while (!iteratorResult.done) { const [attributeValue, encryptionAAD] = iteratorResult.value; - try { - iteratorResult = iterator.next( - (await this.crypto.decrypt(attributeValue, encryptionAAD)) as string - ); - } catch (err) { - iterator.throw!(err); + + let decryptionError; + for (const decrypter of decrypters) { + try { + iteratorResult = iterator.next(await decrypter.decrypt(attributeValue, encryptionAAD)); + decryptionError = undefined; + break; + } catch (err) { + // Remember the error thrown when we tried to decrypt with the primary key. + if (!decryptionError) { + decryptionError = err; + } + } + } + + if (decryptionError) { + iterator.throw!(decryptionError); } } @@ -353,17 +392,31 @@ export class EncryptedSavedObjectsService { public decryptAttributesSync>( descriptor: SavedObjectDescriptor, attributes: T, - params?: CommonParameters + params?: DecryptParameters ): T { + const decrypters = this.getDecrypters(params?.omitPrimaryEncryptionKey); const iterator = this.attributesToDecryptIterator(descriptor, attributes, params); let iteratorResult = iterator.next(); while (!iteratorResult.done) { const [attributeValue, encryptionAAD] = iteratorResult.value; - try { - iteratorResult = iterator.next(this.crypto.decryptSync(attributeValue, encryptionAAD)); - } catch (err) { - iterator.throw!(err); + + let decryptionError; + for (const decrypter of decrypters) { + try { + iteratorResult = iterator.next(decrypter.decryptSync(attributeValue, encryptionAAD)); + decryptionError = undefined; + break; + } catch (err) { + // Remember the error thrown when we tried to decrypt with the primary key. 
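+          // (Decrypters are tried in the order returned by `getDecrypters` below: the primary crypto
+          // first, followed by the decryption-only cryptos, unless `omitPrimaryEncryptionKey` is set.)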
+ if (!decryptionError) { + decryptionError = err; + } + } + } + + if (decryptionError) { + iterator.throw!(decryptionError); } } @@ -388,7 +441,7 @@ export class EncryptedSavedObjectsService { } if (typeof attributeValue !== 'string') { - this.audit.decryptAttributeFailure(attributeName, descriptor, params?.user); + this.options.audit.decryptAttributeFailure(attributeName, descriptor, params?.user); throw new Error( `Encrypted "${attributeName}" attribute should be a string, but found ${typeDetect( attributeValue @@ -401,8 +454,10 @@ export class EncryptedSavedObjectsService { try { decryptedAttributes[attributeName] = (yield [attributeValue, encryptionAAD])!; } catch (err) { - this.logger.error(`Failed to decrypt "${attributeName}" attribute: ${err.message || err}`); - this.audit.decryptAttributeFailure(attributeName, descriptor, params?.user); + this.options.logger.error( + `Failed to decrypt "${attributeName}" attribute: ${err.message || err}` + ); + this.options.audit.decryptAttributeFailure(attributeName, descriptor, params?.user); throw new EncryptionError( `Unable to decrypt attribute "${attributeName}"`, @@ -417,7 +472,7 @@ export class EncryptedSavedObjectsService { // not the case we should collect and log them to make troubleshooting easier. const decryptedAttributesKeys = Object.keys(decryptedAttributes); if (decryptedAttributesKeys.length !== typeDefinition.attributesToEncrypt.size) { - this.logger.debug( + this.options.logger.debug( `The following attributes of saved object "${descriptorToArray( descriptor )}" should have been decrypted: ${Array.from( @@ -430,7 +485,7 @@ export class EncryptedSavedObjectsService { return attributes; } - this.audit.decryptAttributesSuccess(decryptedAttributesKeys, descriptor, params?.user); + this.options.audit.decryptAttributesSuccess(decryptedAttributesKeys, descriptor, params?.user); return { ...attributes, @@ -459,7 +514,7 @@ export class EncryptedSavedObjectsService { } if (Object.keys(attributesAAD).length === 0) { - this.logger.debug( + this.options.logger.debug( `The AAD for saved object "${descriptorToArray( descriptor )}" does not include any attributes.` @@ -468,4 +523,23 @@ export class EncryptedSavedObjectsService { return stringify([...descriptorToArray(descriptor), attributesAAD]); } + + /** + * Returns list of NodeCrypto instances used for decryption. + * @param omitPrimaryEncryptionKey Specifies whether returned decrypters shouldn't include primary + * encryption/decryption crypto. + */ + private getDecrypters(omitPrimaryEncryptionKey?: boolean) { + if (omitPrimaryEncryptionKey) { + if (!this.options.decryptionOnlyCryptos || this.options.decryptionOnlyCryptos.length === 0) { + throw new Error( + `"omitPrimaryEncryptionKey" cannot be set when secondary keys aren't configured.` + ); + } + + return this.options.decryptionOnlyCryptos; + } + + return [this.options.primaryCrypto, ...(this.options.decryptionOnlyCryptos ?? [])]; + } } diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.mocks.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.mocks.ts new file mode 100644 index 0000000000000..2d14577f91567 --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.mocks.ts @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { EncryptionKeyRotationService } from './encryption_key_rotation_service'; + +function createEncryptionKeyRotationServiceMock() { + return ({ rotate: jest.fn() } as unknown) as jest.Mocked; +} + +export const encryptionKeyRotationServiceMock = { + create: createEncryptionKeyRotationServiceMock, +}; diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.test.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.test.ts new file mode 100644 index 0000000000000..8607b81e7205e --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.test.ts @@ -0,0 +1,502 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { + SavedObject, + SavedObjectsClientContract, + SavedObjectsServiceStart, +} from '../../../../../src/core/server'; +import { EncryptionError, EncryptionErrorOperation } from './encryption_error'; +import { EncryptionKeyRotationService } from './encryption_key_rotation_service'; +import { EncryptedSavedObjectsService } from './encrypted_saved_objects_service'; + +import { + coreMock, + httpServerMock, + loggingSystemMock, + savedObjectsClientMock, + savedObjectsTypeRegistryMock, +} from '../../../../../src/core/server/mocks'; +import { encryptedSavedObjectsServiceMock } from './index.mock'; + +function getMockSavedObject(savedObject?: Partial>) { + const id = savedObject?.id ?? `id-1`; + return { + id, + type: `type-${id}`, + references: [], + attributes: { attr: `attr-${id}` }, + score: 0, + ...savedObject, + }; +} + +let mockEncryptionService: jest.Mocked; +let mockRetrieveClient: jest.Mocked; +let mockUpdateClient: jest.Mocked; +let mockSavedObjects: jest.Mocked; +let service: EncryptionKeyRotationService; +beforeEach(() => { + mockEncryptionService = encryptedSavedObjectsServiceMock.create(); + mockEncryptionService.isRegistered.mockImplementation( + (type) => type !== 'type-id-3' && type !== 'type-id-6' + ); + mockEncryptionService.decryptAttributes.mockImplementation(async (descriptor, { attr }) => ({ + attr: `decrypted-${attr}`, + })); + + const coreSetupMock = coreMock.createSetup(); + const coreStartMock = coreMock.createStart(); + coreSetupMock.getStartServices.mockResolvedValue([coreStartMock, {}, {}]); + + mockSavedObjects = coreStartMock.savedObjects; + const typeRegistryMock = savedObjectsTypeRegistryMock.create(); + typeRegistryMock.getAllTypes.mockReturnValue([ + { name: 'type-id-1', namespaceType: 'single', mappings: { properties: {} }, hidden: false }, + { name: 'type-id-2', namespaceType: 'single', mappings: { properties: {} }, hidden: true }, + { name: 'type-id-3', namespaceType: 'single', mappings: { properties: {} }, hidden: false }, + { name: 'type-id-4', namespaceType: 'multiple', mappings: { properties: {} }, hidden: true }, + { name: 'type-id-5', namespaceType: 'single', mappings: { properties: {} }, hidden: false }, + { name: 'type-id-6', namespaceType: 'single', mappings: { properties: {} }, hidden: true }, + ]); + typeRegistryMock.isSingleNamespace.mockImplementation((type) => type !== 'type-id-4'); + mockSavedObjects.getTypeRegistry.mockReturnValue(typeRegistryMock); + + mockRetrieveClient = 
savedObjectsClientMock.create(); + mockRetrieveClient.find.mockResolvedValue({ total: 0, saved_objects: [], per_page: 0, page: 0 }); + mockUpdateClient = savedObjectsClientMock.create(); + mockSavedObjects.getScopedClient.mockImplementation((request, params) => + params?.excludedWrappers?.[0] === 'encryptedSavedObjects' + ? mockRetrieveClient + : mockUpdateClient + ); + + service = new EncryptionKeyRotationService({ + logger: loggingSystemMock.create().get(), + service: mockEncryptionService, + getStartServices: coreSetupMock.getStartServices, + }); +}); + +it('correctly setups Saved Objects clients', async () => { + const mockRequest = httpServerMock.createKibanaRequest(); + await service.rotate(mockRequest, { batchSize: 10000 }); + + expect(mockSavedObjects.getScopedClient).toHaveBeenCalledTimes(2); + expect(mockSavedObjects.getScopedClient).toHaveBeenCalledWith(mockRequest, { + includedHiddenTypes: ['type-id-2', 'type-id-4'], + excludedWrappers: ['encryptedSavedObjects'], + }); + expect(mockSavedObjects.getScopedClient).toHaveBeenCalledWith(mockRequest, { + includedHiddenTypes: ['type-id-2', 'type-id-4'], + }); +}); + +it('bails out if specified type is not registered', async () => { + mockEncryptionService.isRegistered.mockImplementation((type) => type !== 'type-unknown'); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { + batchSize: 10000, + type: 'type-unknown', + }) + ).resolves.toEqual({ + total: 0, + successful: 0, + failed: 0, + }); + + expect(mockSavedObjects.getScopedClient).not.toHaveBeenCalled(); +}); + +it('does not perform rotation if there are no Saved Objects to process', async () => { + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 12345 }) + ).resolves.toEqual({ + total: 0, + successful: 0, + failed: 0, + }); + + expect(mockRetrieveClient.find).toHaveBeenCalledTimes(1); + expect(mockRetrieveClient.find).toHaveBeenCalledWith({ + type: ['type-id-1', 'type-id-2', 'type-id-4', 'type-id-5'], + perPage: 12345, + namespaces: ['*'], + sortField: 'updated_at', + sortOrder: 'asc', + }); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 54321, type: 'type-id-2' }) + ).resolves.toEqual({ + total: 0, + successful: 0, + failed: 0, + }); + + expect(mockRetrieveClient.find).toHaveBeenCalledTimes(2); + expect(mockRetrieveClient.find).toHaveBeenCalledWith({ + type: ['type-id-2'], + perPage: 54321, + namespaces: ['*'], + sortField: 'updated_at', + sortOrder: 'asc', + }); + + expect(mockEncryptionService.decryptAttributes).not.toHaveBeenCalled(); + expect(mockUpdateClient.bulkUpdate).not.toHaveBeenCalled(); +}); + +it('throws if Saved Object attributes cannot be decrypted because of unknown reason', async () => { + mockRetrieveClient.find.mockResolvedValue({ + total: 2, + saved_objects: [getMockSavedObject({ id: 'id-1' }), getMockSavedObject({ id: 'id-2' })], + per_page: 2, + page: 0, + }); + + const decryptionFailure = new Error('Oh no!'); + mockEncryptionService.decryptAttributes.mockRejectedValue(decryptionFailure); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 12345 }) + ).rejects.toBe(decryptionFailure); + + expect(mockUpdateClient.bulkUpdate).not.toHaveBeenCalled(); +}); + +it('does not perform rotation if Saved Object attributes cannot be decrypted', async () => { + mockRetrieveClient.find.mockResolvedValue({ + total: 2, + saved_objects: [getMockSavedObject({ id: 'id-1' }), getMockSavedObject({ id: 'id-2' })], + per_page: 2, + page: 0, + }); + + 
mockEncryptionService.decryptAttributes.mockRejectedValue( + new EncryptionError('some-message', 'attr', EncryptionErrorOperation.Decryption) + ); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 12345 }) + ).resolves.toEqual({ + total: 2, + successful: 0, + failed: 0, + }); + + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(2); + expect(mockUpdateClient.bulkUpdate).not.toHaveBeenCalled(); +}); + +it('properly rotates encryption key', async () => { + const savedObjects = [ + getMockSavedObject({ id: 'id-1' }), + getMockSavedObject({ id: 'id-2', namespaces: ['ns-1'] }), + getMockSavedObject({ id: 'id-4', namespaces: ['ns-2', 'ns-3'] }), + ]; + mockRetrieveClient.find.mockResolvedValue({ + total: 3, + saved_objects: savedObjects, + per_page: 3, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValue({ + saved_objects: savedObjects.map((object) => ({ ...object, attributes: {} })), + }); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 12345 }) + ).resolves.toEqual({ + total: 3, + successful: 3, + failed: 0, + }); + + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(3); + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledWith( + { type: 'type-id-1', id: 'id-1' }, + { attr: 'attr-id-1' }, + { omitPrimaryEncryptionKey: true } + ); + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledWith( + { type: 'type-id-2', id: 'id-2', namespace: 'ns-1' }, + { attr: 'attr-id-2' }, + { omitPrimaryEncryptionKey: true } + ); + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledWith( + { type: 'type-id-4', id: 'id-4' }, + { attr: 'attr-id-4' }, + { omitPrimaryEncryptionKey: true } + ); + + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith([ + { ...savedObjects[0], attributes: { attr: 'decrypted-attr-id-1' } }, + { ...savedObjects[1], namespace: 'ns-1', attributes: { attr: 'decrypted-attr-id-2' } }, + { ...savedObjects[2], namespace: 'ns-2', attributes: { attr: 'decrypted-attr-id-4' } }, + ]); +}); + +it('skips objects that cannot be decrypted', async () => { + const savedObjects = [ + getMockSavedObject({ id: 'id-1' }), + getMockSavedObject({ id: 'id-2', namespaces: ['ns-1'] }), + getMockSavedObject({ id: 'id-4', namespaces: ['ns-2', 'ns-3'] }), + ]; + mockRetrieveClient.find.mockResolvedValue({ + total: 3, + saved_objects: savedObjects, + per_page: 3, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValue({ + saved_objects: [ + { ...savedObjects[0], attributes: {} }, + { ...savedObjects[2], attributes: {} }, + ], + }); + + mockEncryptionService.decryptAttributes.mockImplementation(async ({ type }, { attr }) => { + if (type === 'type-id-2') { + throw new EncryptionError('some-message', 'attr', EncryptionErrorOperation.Decryption); + } + + return { attr: `decrypted-${attr}` }; + }); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 12345 }) + ).resolves.toEqual({ + total: 3, + successful: 2, + failed: 0, + }); + + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(3); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith([ + { ...savedObjects[0], attributes: { attr: 'decrypted-attr-id-1' } }, + { ...savedObjects[2], namespace: 'ns-2', attributes: { attr: 'decrypted-attr-id-4' } }, + ]); +}); + +it('marks object that we could not update as failed', 
async () => { + const savedObjects = [ + getMockSavedObject({ id: 'id-1' }), + getMockSavedObject({ id: 'id-2', namespaces: ['ns-1'] }), + getMockSavedObject({ id: 'id-4', namespaces: ['ns-2', 'ns-3'] }), + ]; + mockRetrieveClient.find.mockResolvedValue({ + total: 3, + saved_objects: savedObjects, + per_page: 3, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValue({ + saved_objects: [{ ...savedObjects[0], attributes: {} }, { error: new Error('Oh no!') } as any], + }); + + mockEncryptionService.decryptAttributes.mockImplementation(async ({ type }, { attr }) => { + if (type === 'type-id-2') { + throw new EncryptionError('some-message', 'attr', EncryptionErrorOperation.Decryption); + } + + return { attr: `decrypted-${attr}` }; + }); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 12345 }) + ).resolves.toEqual({ + total: 3, + successful: 1, + failed: 1, + }); + + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(3); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledTimes(1); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith([ + { ...savedObjects[0], attributes: { attr: 'decrypted-attr-id-1' } }, + { ...savedObjects[2], namespace: 'ns-2', attributes: { attr: 'decrypted-attr-id-4' } }, + ]); +}); + +it('iterates until number of returned results less than batch size', async () => { + const savedObjectsBatch0 = [ + getMockSavedObject({ id: 'id-1', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-2', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-3', type: 'type-id-1' }), + ]; + + const savedObjectsBatch1 = [ + getMockSavedObject({ id: 'id-4', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-5', type: 'type-id-1' }), + ]; + + // During first request we had 100 objects in total. + mockRetrieveClient.find.mockResolvedValueOnce({ + total: 100, + saved_objects: savedObjectsBatch0, + per_page: 3, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValueOnce({ + saved_objects: [ + { ...savedObjectsBatch0[0], attributes: {} }, + { ...savedObjectsBatch0[1], attributes: {} }, + { ...savedObjectsBatch0[2], attributes: {} }, + ], + }); + + // But when we fetch data for the second time we have just two objects left (e.g. they were removed). 
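+  // The second response reports `total: 2`, which is not greater than the `batchSize` of 3, so the rotation
+  // loop stops after processing this batch.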
+ mockRetrieveClient.find.mockResolvedValueOnce({ + total: 2, + saved_objects: savedObjectsBatch1, + per_page: 2, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValueOnce({ + saved_objects: [ + { ...savedObjectsBatch1[0], attributes: {} }, + { ...savedObjectsBatch1[1], attributes: {} }, + ], + }); + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 3 }) + ).resolves.toEqual({ + total: 100, + successful: 5, + failed: 0, + }); + + expect(mockRetrieveClient.find).toHaveBeenCalledTimes(2); + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(5); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledTimes(2); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith([ + { ...savedObjectsBatch0[0], attributes: { attr: 'decrypted-attr-id-1' } }, + { ...savedObjectsBatch0[1], attributes: { attr: 'decrypted-attr-id-2' } }, + { ...savedObjectsBatch0[2], attributes: { attr: 'decrypted-attr-id-3' } }, + ]); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith([ + { ...savedObjectsBatch1[0], attributes: { attr: 'decrypted-attr-id-4' } }, + { ...savedObjectsBatch1[1], attributes: { attr: 'decrypted-attr-id-5' } }, + ]); +}); + +it('iterates until no new objects are returned', async () => { + const savedObjectBatches = [ + [ + getMockSavedObject({ id: 'id-1', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-2', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-3', type: 'type-id-1' }), + ], + [ + getMockSavedObject({ id: 'id-4', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-5', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-6', type: 'type-id-1' }), + ], + [ + getMockSavedObject({ id: 'id-7', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-8', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-9', type: 'type-id-1' }), + ], + [ + getMockSavedObject({ id: 'id-1', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-2', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-3', type: 'type-id-1' }), + ], + ]; + + for (const batch of savedObjectBatches) { + mockRetrieveClient.find.mockResolvedValueOnce({ + total: 100, + saved_objects: batch, + per_page: 3, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValueOnce({ + saved_objects: batch.map((object) => ({ ...object, attributes: {} })), + }); + } + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 3 }) + ).resolves.toEqual({ + total: 100, + successful: 9, + failed: 0, + }); + + expect(mockRetrieveClient.find).toHaveBeenCalledTimes(4); + // We don't decrypt\update same object twice, so neither object from the last batch is decrypted or updated. + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(9); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledTimes(3); + for (const batch of savedObjectBatches.slice(0, 3)) { + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith( + batch.map((object) => ({ + ...object, + attributes: { attr: `decrypted-${object.attributes.attr}` }, + })) + ); + } +}); + +it('iterates until max number of batches is reached', async () => { + // Simulate the scenario when we're getting more records then was indicated by the `total` field + // returned with the first batch, and every such batch includes documents we haven't processed yet. 
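+  // With a `batchSize` of 2 and a reported `total` of 3, `maxBatches = Math.ceil((3 * 2) / 2) = 3`, so only the
+  // first three mocked batches below are fetched and re-encrypted (6 objects in total).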
+ const savedObjectBatches = [ + [ + getMockSavedObject({ id: 'id-1', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-2', type: 'type-id-1' }), + ], + [ + getMockSavedObject({ id: 'id-3', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-4', type: 'type-id-1' }), + ], + [ + getMockSavedObject({ id: 'id-5', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-6', type: 'type-id-1' }), + ], + [ + getMockSavedObject({ id: 'id-7', type: 'type-id-1' }), + getMockSavedObject({ id: 'id-8', type: 'type-id-1' }), + ], + ]; + + for (const batch of savedObjectBatches) { + mockRetrieveClient.find.mockResolvedValueOnce({ + total: 3, + saved_objects: batch, + per_page: 2, + page: 0, + }); + mockUpdateClient.bulkUpdate.mockResolvedValueOnce({ + saved_objects: batch.map((object) => ({ ...object, attributes: {} })), + }); + } + + await expect( + service.rotate(httpServerMock.createKibanaRequest(), { batchSize: 2 }) + ).resolves.toEqual({ + total: 3, + successful: 6, + failed: 0, + }); + + expect(mockRetrieveClient.find).toHaveBeenCalledTimes(3); + expect(mockEncryptionService.decryptAttributes).toHaveBeenCalledTimes(6); + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledTimes(3); + for (const batch of savedObjectBatches.slice(0, 3)) { + expect(mockUpdateClient.bulkUpdate).toHaveBeenCalledWith( + batch.map((object) => ({ + ...object, + attributes: { attr: `decrypted-${object.attributes.attr}` }, + })) + ); + } +}); diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.ts new file mode 100644 index 0000000000000..fb1b6db45e762 --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/encryption_key_rotation_service.ts @@ -0,0 +1,268 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { + ISavedObjectTypeRegistry, + KibanaRequest, + Logger, + SavedObject, + SavedObjectsBulkUpdateObject, + StartServicesAccessor, +} from 'src/core/server'; +import { AuthenticatedUser, SecurityPluginSetup } from '../../../security/server'; +import { getDescriptorNamespace } from '../saved_objects/get_descriptor_namespace'; +import { EncryptedSavedObjectsService } from './encrypted_saved_objects_service'; +import { EncryptionError } from './encryption_error'; + +interface EncryptionKeyRotationServiceOptions { + logger: Logger; + service: PublicMethodsOf; + getStartServices: StartServicesAccessor; + security?: SecurityPluginSetup; +} + +interface EncryptionKeyRotationParams { + /** + * The maximum number of the objects we fetch and process in one iteration. + */ + batchSize: number; + + /** + * Optionally allows to limit key rotation to only specified Saved Object type. + */ + type?: string; +} + +interface EncryptionKeyRotationResult { + /** + * The total number of the Saved Objects encrypted by the Encrypted Saved Objects plugin. + */ + total: number; + + /** + * The number of the Saved Objects that were still encrypted with one of the secondary encryption + * keys and were successfully re-encrypted with the primary key. + */ + successful: number; + + /** + * The number of the Saved Objects that were still encrypted with one of the secondary encryption + * keys that we failed to re-encrypt with the primary key. 
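+   * Note that if a `bulkUpdate` call fails as a whole, every object in that batch is counted as failed.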
+ */ + failed: number; +} + +/** + * Service that deals with encryption key rotation matters. + */ +export class EncryptionKeyRotationService { + constructor(private readonly options: EncryptionKeyRotationServiceOptions) {} + + public async rotate( + request: KibanaRequest, + { batchSize, type }: EncryptionKeyRotationParams + ): Promise { + const [{ savedObjects }] = await this.options.getStartServices(); + const typeRegistry = savedObjects.getTypeRegistry(); + + // We need to retrieve all SavedObject types which have encrypted attributes, specifically + // collecting those that are hidden as they are ignored by the Saved Objects client by default. + this.options.logger.debug('Retrieving Saved Object types that require encryption.'); + const registeredSavedObjectTypes = []; + const registeredHiddenSavedObjectTypes = []; + for (const knownType of typeRegistry.getAllTypes()) { + if (this.options.service.isRegistered(knownType.name) && (!type || knownType.name === type)) { + registeredSavedObjectTypes.push(knownType.name); + + if (knownType.hidden) { + registeredHiddenSavedObjectTypes.push(knownType.name); + } + } + } + + const result = { total: 0, successful: 0, failed: 0 }; + if (registeredSavedObjectTypes.length === 0) { + this.options.logger.info( + type + ? `Saved Object type "${type}" is not registered, encryption key rotation is not needed.` + : 'There are no registered Saved Object types that can have encrypted attributes, encryption key rotation is not needed.' + ); + return result; + } + + this.options.logger.info( + `Saved Objects with the following types [${registeredSavedObjectTypes}] will be processed.` + ); + + // We need two separate Saved Objects clients for the retrieval and update. For retrieval we + // don't want to have Encrypted Saved Objects wrapper so that it doesn't strip encrypted + // attributes. But for the update we want to have it so that it automatically re-encrypts + // attributes with the new primary encryption key. + const user = this.options.security?.authc.getCurrentUser(request) ?? undefined; + const retrieveClient = savedObjects.getScopedClient(request, { + includedHiddenTypes: registeredHiddenSavedObjectTypes, + excludedWrappers: ['encryptedSavedObjects'], + }); + const updateClient = savedObjects.getScopedClient(request, { + includedHiddenTypes: registeredHiddenSavedObjectTypes, + }); + + // Keeps track of object IDs that have been processed already. + const processedObjectIDs = new Set(); + + // Until we get scroll/search_after support in Saved Objects client we have to retrieve as much objects as allowed + // by the `batchSize` parameter. Instead of using paging functionality (size/from or page/perPage parameters) that + // has certain performance issues and is also limited by the maximum result window setting on .kibana index + // (10,000 by default) we always fetch the first page of the results sorted by the `updated_at` field. This way we + // can prioritize "old" objects that have a higher chance to have been encrypted with the old encryption keys, since + // all newly created or updated objects are always encrypted with the current primary key. Re-encryption of the + // "old" objects with the primary key implicitly bumps up their `updated_at` field so that these objects won't be + // included into the first page of the results during next iteration. Additionally we track IDs of all processed + // objects so that eventually we can detect that first page consists of only objects encrypted with the current + // primary key and stop iterating. 
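+    //
+    // As an illustration (hypothetical numbers): with a `batchSize` of 2 and three objects where A and B were
+    // encrypted with an old key and C with the current primary key, the first batch returns the two least recently
+    // updated objects (say A and B) and re-encrypts them, bumping their `updated_at`. The next batch then surfaces C,
+    // which is skipped because it cannot be decrypted with the decryption-only keys, so the rotation loop stops as
+    // soon as a batch yields nothing to re-encrypt.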
+    //
+    // LIMITATION: if we have a lot of "old" objects encrypted with an _unknown_ encryption key, it may either
+    // significantly slow down rotation or prevent it from happening completely, since such objects will be included in
+    // every batch we fetch, and if their number is equal to or greater than `batchSize` we won't be able to process any
+    // object. Another, more complex, case where we can be hit by this limitation is when users have multiple Kibana
+    // instances configured with different primary encryption keys; in that case even "new" objects may require rotation,
+    // but they may only be included in the second or later pages of the results. We can potentially detect such cases and issue a warning,
+    // but it's not an easy task: if none of the objects from the very first batch can be
+    // decrypted with the decryption-only keys, we'll need to check how many of them can be decrypted at all using all
+    // available keys, including the current primary one.
+    //
+    // Also, theoretically, if `batchSize` is less than `index.max_result_window` we could try to rely on the paging
+    // functionality and switch to the second page, but the issue here is that objects can be deleted in the meantime
+    // so that unprocessed objects may get into the first page and we'll miss them. We can of course oscillate between
+    // the first and the second pages or do multiple rotation passes, but it'd complicate the code significantly.
+    let batch = 0;
+    let maxBatches = 0;
+    while (true) {
+      this.options.logger.debug(`Fetching ${batchSize} objects (batch #${batch}).`);
+      const savedObjectsToDecrypt = await retrieveClient.find({
+        type: registeredSavedObjectTypes,
+        perPage: batchSize,
+        namespaces: ['*'],
+        sortField: 'updated_at',
+        sortOrder: 'asc',
+      });
+
+      // We use `total` only from the first batch, just as an approximate indicator for the consumer, since the total
+      // number can change from batch to batch, but it won't affect the actual processing logic.
+      if (batch === 0) {
+        this.options.logger.debug(`Found ${savedObjectsToDecrypt.total} objects.`);
+        result.total = savedObjectsToDecrypt.total;
+        // Since we process live data there is a theoretical chance that we may be getting new
+        // objects in every batch, effectively making this loop infinite. To prevent this we want to
+        // limit the number of batches we process during a single rotation request, giving enough room
+        // for the Saved Objects occasionally created during rotation.
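+        // For example (illustrative numbers): a reported `total` of 1000 with a `batchSize` of 100 allows at most
+        // Math.ceil((1000 * 2) / 100) = 20 batches before the rotation request bails out.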
+ maxBatches = Math.ceil((savedObjectsToDecrypt.total * 2) / batchSize); + } + + this.options.logger.debug( + `Decrypting ${savedObjectsToDecrypt.saved_objects.length} objects (batch #${batch}).` + ); + const savedObjectsToEncrypt = await this.getSavedObjectsToReEncrypt( + savedObjectsToDecrypt.saved_objects, + processedObjectIDs, + typeRegistry, + user + ); + if (savedObjectsToEncrypt.length === 0) { + break; + } + + this.options.logger.debug( + `Re-encrypting ${savedObjectsToEncrypt.length} objects (batch #${batch}).` + ); + try { + const succeeded = ( + await updateClient.bulkUpdate(savedObjectsToEncrypt) + ).saved_objects.filter((savedObject) => !savedObject.error).length; + + this.options.logger.debug( + `Successfully re-encrypted ${succeeded} out of ${savedObjectsToEncrypt.length} objects (batch #${batch}).` + ); + + result.successful += succeeded; + result.failed += savedObjectsToEncrypt.length - succeeded; + } catch (err) { + this.options.logger.error( + `Failed to re-encrypt saved objects (batch #${batch}): ${err.message}` + ); + result.failed += savedObjectsToEncrypt.length; + } + + if (savedObjectsToDecrypt.total <= batchSize || ++batch >= maxBatches) { + break; + } + } + + this.options.logger.info( + `Encryption key rotation is completed. ${result.successful} objects out ouf ${result.total} were successfully re-encrypted with the primary encryption key and ${result.failed} objects failed.` + ); + + return result; + } + + /** + * Takes a list of Saved Objects and tries to decrypt their attributes with the secondary encryption + * keys, silently skipping those that cannot be decrypted. The objects that were decrypted with the + * decryption-only keys will be returned and grouped by the namespace. + * @param savedObjects Saved Objects to decrypt attributes for. + * @param processedObjectIDs Set of Saved Object IDs that were already processed. + * @param typeRegistry Saved Objects type registry. + * @param user The user that initiated decryption. + */ + private async getSavedObjectsToReEncrypt( + savedObjects: SavedObject[], + processedObjectIDs: Set, + typeRegistry: ISavedObjectTypeRegistry, + user?: AuthenticatedUser + ) { + const decryptedSavedObjects: SavedObjectsBulkUpdateObject[] = []; + for (const savedObject of savedObjects) { + // We shouldn't process objects that we already processed during previous iterations. + if (processedObjectIDs.has(savedObject.id)) { + continue; + } else { + processedObjectIDs.add(savedObject.id); + } + + let decryptedAttributes; + try { + decryptedAttributes = await this.options.service.decryptAttributes( + { + type: savedObject.type, + id: savedObject.id, + namespace: getDescriptorNamespace( + typeRegistry, + savedObject.type, + savedObject.namespaces + ), + }, + savedObject.attributes as Record, + { omitPrimaryEncryptionKey: true, user } + ); + } catch (err) { + if (!(err instanceof EncryptionError)) { + throw err; + } + + continue; + } + + decryptedSavedObjects.push({ + ...savedObject, + attributes: decryptedAttributes, + // `bulkUpdate` expects objects with a single `namespace`. 
+ namespace: savedObject.namespaces?.[0], + }); + } + + return decryptedSavedObjects; + } +} diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/index.mock.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/index.mock.ts index 3e4983deca625..4410cbac7beb9 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/crypto/index.mock.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/index.mock.ts @@ -5,3 +5,4 @@ */ export { encryptedSavedObjectsServiceMock } from './encrypted_saved_objects_service.mocks'; +export { encryptionKeyRotationServiceMock } from './encryption_key_rotation_service.mocks'; diff --git a/x-pack/plugins/encrypted_saved_objects/server/crypto/index.ts b/x-pack/plugins/encrypted_saved_objects/server/crypto/index.ts index 75445bd24eba8..ff5e5fdc01059 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/crypto/index.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/crypto/index.ts @@ -12,3 +12,4 @@ export { } from './encrypted_saved_objects_service'; export { EncryptionError } from './encryption_error'; export { EncryptedSavedObjectAttributesDefinition } from './encrypted_saved_object_type_definition'; +export { EncryptionKeyRotationService } from './encryption_key_rotation_service'; diff --git a/x-pack/plugins/encrypted_saved_objects/server/plugin.test.ts b/x-pack/plugins/encrypted_saved_objects/server/plugin.test.ts index 57108954f2568..8d8f1a51f6802 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/plugin.test.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/plugin.test.ts @@ -5,6 +5,7 @@ */ import { Plugin } from './plugin'; +import { ConfigSchema } from './config'; import { coreMock } from 'src/core/server/mocks'; import { securityMock } from '../../security/server/mocks'; @@ -12,7 +13,9 @@ import { securityMock } from '../../security/server/mocks'; describe('EncryptedSavedObjects Plugin', () => { describe('setup()', () => { it('exposes proper contract', async () => { - const plugin = new Plugin(coreMock.createPluginInitializerContext()); + const plugin = new Plugin( + coreMock.createPluginInitializerContext(ConfigSchema.validate({}, { dist: true })) + ); await expect(plugin.setup(coreMock.createSetup(), { security: securityMock.createSetup() })) .resolves.toMatchInlineSnapshot(` Object { @@ -26,7 +29,9 @@ describe('EncryptedSavedObjects Plugin', () => { describe('start()', () => { it('exposes proper contract', async () => { - const plugin = new Plugin(coreMock.createPluginInitializerContext()); + const plugin = new Plugin( + coreMock.createPluginInitializerContext(ConfigSchema.validate({}, { dist: true })) + ); await plugin.setup(coreMock.createSetup(), { security: securityMock.createSetup() }); const startContract = plugin.start(); diff --git a/x-pack/plugins/encrypted_saved_objects/server/plugin.ts b/x-pack/plugins/encrypted_saved_objects/server/plugin.ts index 69777798ddf19..6e3724fa3fe58 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/plugin.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/plugin.ts @@ -4,19 +4,22 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { first, map } from 'rxjs/operators'; import nodeCrypto from '@elastic/node-crypto'; import { Logger, PluginInitializerContext, CoreSetup } from 'src/core/server'; -import { first } from 'rxjs/operators'; +import { TypeOf } from '@kbn/config-schema'; import { SecurityPluginSetup } from '../../security/server'; -import { createConfig$ } from './config'; +import { createConfig, ConfigSchema } from './config'; import { EncryptedSavedObjectsService, EncryptedSavedObjectTypeRegistration, EncryptionError, + EncryptionKeyRotationService, } from './crypto'; import { EncryptedSavedObjectsAuditLogger } from './audit'; import { setupSavedObjects, ClientInstanciator } from './saved_objects'; import { getCreateMigration, CreateEncryptedSavedObjectsMigrationFn } from './create_migration'; +import { defineRoutes } from './routes'; export interface PluginsSetup { security?: SecurityPluginSetup; @@ -48,18 +51,29 @@ export class Plugin { core: CoreSetup, deps: PluginsSetup ): Promise { - const { - config: { encryptionKey }, - usingEphemeralEncryptionKey, - } = await createConfig$(this.initializerContext).pipe(first()).toPromise(); - - const crypto = nodeCrypto({ encryptionKey }); - + const config = await this.initializerContext.config + .create>() + .pipe( + map((rawConfig) => createConfig(rawConfig, this.initializerContext.logger.get('config'))) + ) + .pipe(first()) + .toPromise(); const auditLogger = new EncryptedSavedObjectsAuditLogger( deps.security?.audit.getLogger('encryptedSavedObjects') ); + + const primaryCrypto = nodeCrypto({ encryptionKey: config.encryptionKey }); + const decryptionOnlyCryptos = config.keyRotation.decryptionOnlyKeys.map((decryptionKey) => + nodeCrypto({ encryptionKey: decryptionKey }) + ); + const service = Object.freeze( - new EncryptedSavedObjectsService(crypto, this.logger, auditLogger) + new EncryptedSavedObjectsService({ + primaryCrypto, + decryptionOnlyCryptos, + logger: this.logger, + audit: auditLogger, + }) ); this.savedObjectsSetup = setupSavedObjects({ @@ -69,18 +83,33 @@ export class Plugin { getStartServices: core.getStartServices, }); + defineRoutes({ + router: core.http.createRouter(), + logger: this.initializerContext.logger.get('routes'), + encryptionKeyRotationService: Object.freeze( + new EncryptionKeyRotationService({ + logger: this.logger.get('key-rotation-service'), + service, + getStartServices: core.getStartServices, + security: deps.security, + }) + ), + config, + }); + return { registerType: (typeRegistration: EncryptedSavedObjectTypeRegistration) => service.registerType(typeRegistration), - usingEphemeralEncryptionKey, + usingEphemeralEncryptionKey: config.usingEphemeralEncryptionKey, createMigration: getCreateMigration( service, (typeRegistration: EncryptedSavedObjectTypeRegistration) => { - const serviceForMigration = new EncryptedSavedObjectsService( - crypto, - this.logger, - auditLogger - ); + const serviceForMigration = new EncryptedSavedObjectsService({ + primaryCrypto, + decryptionOnlyCryptos, + logger: this.logger, + audit: auditLogger, + }); serviceForMigration.registerType(typeRegistration); return serviceForMigration; } diff --git a/x-pack/plugins/encrypted_saved_objects/server/routes/index.mock.ts b/x-pack/plugins/encrypted_saved_objects/server/routes/index.mock.ts new file mode 100644 index 0000000000000..b3d54c7f1ecac --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/routes/index.mock.ts @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { ConfigSchema, createConfig } from '../config'; + +import { httpServiceMock, loggingSystemMock } from '../../../../../src/core/server/mocks'; +import { encryptionKeyRotationServiceMock } from '../crypto/index.mock'; + +export const routeDefinitionParamsMock = { + create: (config: Record = {}) => ({ + router: httpServiceMock.createRouter(), + logger: loggingSystemMock.create().get(), + config: createConfig(ConfigSchema.validate(config), loggingSystemMock.create().get()), + encryptionKeyRotationService: encryptionKeyRotationServiceMock.create(), + }), +}; diff --git a/x-pack/plugins/encrypted_saved_objects/server/routes/index.ts b/x-pack/plugins/encrypted_saved_objects/server/routes/index.ts new file mode 100644 index 0000000000000..72af8060de827 --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/routes/index.ts @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { IRouter, Logger } from '../../../../../src/core/server'; +import { ConfigType } from '../config'; +import { EncryptionKeyRotationService } from '../crypto'; + +import { defineKeyRotationRoutes } from './key_rotation'; + +/** + * Describes parameters used to define HTTP routes. + */ +export interface RouteDefinitionParams { + router: IRouter; + logger: Logger; + config: ConfigType; + encryptionKeyRotationService: PublicMethodsOf; +} + +export function defineRoutes(params: RouteDefinitionParams) { + defineKeyRotationRoutes(params); +} diff --git a/x-pack/plugins/encrypted_saved_objects/server/routes/key_rotation.test.ts b/x-pack/plugins/encrypted_saved_objects/server/routes/key_rotation.test.ts new file mode 100644 index 0000000000000..ced4dda48fcd2 --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/routes/key_rotation.test.ts @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { Type } from '@kbn/config-schema'; +import { + IRouter, + kibanaResponseFactory, + RequestHandler, + RequestHandlerContext, + RouteConfig, +} from '../../../../../src/core/server'; +import { defineKeyRotationRoutes } from './key_rotation'; + +import { httpServerMock } from '../../../../../src/core/server/mocks'; +import { routeDefinitionParamsMock } from './index.mock'; +import { EncryptionKeyRotationService } from '../crypto'; + +describe('Key rotation routes', () => { + let router: jest.Mocked; + let mockContext: RequestHandlerContext; + let mockEncryptionKeyRotationService: jest.Mocked; + beforeEach(() => { + const routeParamsMock = routeDefinitionParamsMock.create({ + keyRotation: { decryptionOnlyKeys: ['b'.repeat(32)] }, + }); + router = routeParamsMock.router; + mockEncryptionKeyRotationService = routeParamsMock.encryptionKeyRotationService; + + mockContext = ({} as unknown) as RequestHandlerContext; + + defineKeyRotationRoutes(routeParamsMock); + }); + + describe('rotate', () => { + let routeHandler: RequestHandler; + let routeConfig: RouteConfig; + beforeEach(() => { + const [rotateRouteConfig, rotateRouteHandler] = router.post.mock.calls.find( + ([{ path }]) => path === '/api/encrypted_saved_objects/_rotate_key' + )!; + + routeConfig = rotateRouteConfig; + routeHandler = rotateRouteHandler; + }); + + it('correctly defines route.', () => { + expect(routeConfig.options).toEqual({ tags: ['access:rotateEncryptionKey'] }); + expect(routeConfig.validate).toEqual({ + body: undefined, + query: expect.any(Type), + params: undefined, + }); + + const queryValidator = (routeConfig.validate as any).query as Type; + expect( + queryValidator.validate({ + batchSize: 100, + type: 'some-type', + }) + ).toEqual({ + batchSize: 100, + type: 'some-type', + }); + expect(queryValidator.validate({ batchSize: 1 })).toEqual({ batchSize: 1 }); + expect(queryValidator.validate({ batchSize: 10000 })).toEqual({ batchSize: 10000 }); + expect(queryValidator.validate({})).toEqual({ batchSize: 10000 }); + + expect(() => queryValidator.validate({ batchSize: 0 })).toThrowErrorMatchingInlineSnapshot( + `"[batchSize]: Value must be equal to or greater than [1]."` + ); + expect(() => + queryValidator.validate({ batchSize: 10001 }) + ).toThrowErrorMatchingInlineSnapshot( + `"[batchSize]: Value must be equal to or lower than [10000]."` + ); + + expect(() => queryValidator.validate({ type: 100 })).toThrowErrorMatchingInlineSnapshot( + `"[type]: expected value of type [string] but got [number]"` + ); + }); + + it('returns 400 if decryption only keys are not specified.', async () => { + const routeParamsMock = routeDefinitionParamsMock.create(); + defineKeyRotationRoutes(routeParamsMock); + const [, rotateRouteHandler] = routeParamsMock.router.post.mock.calls.find( + ([{ path }]) => path === '/api/encrypted_saved_objects/_rotate_key' + )!; + + await expect( + rotateRouteHandler(mockContext, httpServerMock.createKibanaRequest(), kibanaResponseFactory) + ).resolves.toEqual({ + status: 400, + payload: + 'Kibana is not configured to support encryption key rotation. Update `kibana.yml` to include `xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys` to rotate your encryption keys.', + options: { + body: + 'Kibana is not configured to support encryption key rotation. 
Update `kibana.yml` to include `xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys` to rotate your encryption keys.', + }, + }); + }); + + it('returns 500 if `rotate` throws unhandled exception.', async () => { + const unhandledException = new Error('Something went wrong.'); + mockEncryptionKeyRotationService.rotate.mockRejectedValue(unhandledException); + + const mockRequest = httpServerMock.createKibanaRequest({ query: { batchSize: 1234 } }); + const response = await routeHandler(mockContext, mockRequest, kibanaResponseFactory); + + expect(response.status).toBe(500); + expect(response.payload).toEqual(unhandledException); + expect(mockEncryptionKeyRotationService.rotate).toHaveBeenCalledWith(mockRequest, { + batchSize: 1234, + }); + }); + + it('returns whatever `rotate` returns.', async () => { + const mockRequest = httpServerMock.createKibanaRequest({ query: { batchSize: 1234 } }); + mockEncryptionKeyRotationService.rotate.mockResolvedValue({ + total: 3, + successful: 6, + failed: 0, + }); + + await expect(routeHandler(mockContext, mockRequest, kibanaResponseFactory)).resolves.toEqual({ + status: 200, + payload: { total: 3, successful: 6, failed: 0 }, + options: { body: { total: 3, successful: 6, failed: 0 } }, + }); + }); + + it('returns 429 if called while rotation is in progress.', async () => { + const mockRequest = httpServerMock.createKibanaRequest({ query: { batchSize: 1234 } }); + mockEncryptionKeyRotationService.rotate.mockResolvedValue({ + total: 3, + successful: 6, + failed: 0, + }); + + // Run rotation, but don't wait until it's complete. + const firstRequestPromise = routeHandler(mockContext, mockRequest, kibanaResponseFactory); + + // Try to run rotation once again. + await expect(routeHandler(mockContext, mockRequest, kibanaResponseFactory)).resolves.toEqual({ + status: 429, + payload: + 'Encryption key rotation is in progress already. Please wait until it is completed and try again.', + options: { + statusCode: 429, + body: + 'Encryption key rotation is in progress already. Please wait until it is completed and try again.', + }, + }); + + // Initial request properly resolves. + await expect(firstRequestPromise).resolves.toEqual({ + status: 200, + payload: { total: 3, successful: 6, failed: 0 }, + options: { body: { total: 3, successful: 6, failed: 0 } }, + }); + + // And subsequent requests resolve properly too. + await expect(routeHandler(mockContext, mockRequest, kibanaResponseFactory)).resolves.toEqual({ + status: 200, + payload: { total: 3, successful: 6, failed: 0 }, + options: { body: { total: 3, successful: 6, failed: 0 } }, + }); + }); + }); +}); diff --git a/x-pack/plugins/encrypted_saved_objects/server/routes/key_rotation.ts b/x-pack/plugins/encrypted_saved_objects/server/routes/key_rotation.ts new file mode 100644 index 0000000000000..48b29387106ee --- /dev/null +++ b/x-pack/plugins/encrypted_saved_objects/server/routes/key_rotation.ts @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { schema } from '@kbn/config-schema'; +import { RouteDefinitionParams } from '.'; + +/** + * The default maximum value of from + size for searches to .kibana index. Since we cannot use scroll + * or search_after functionality with the .kibana index we limit maximum batch size with this value. 
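+ * This mirrors the default `index.max_result_window` setting in Elasticsearch (10,000); callers can request a
+ * smaller batch, e.g. `POST /api/encrypted_saved_objects/_rotate_key?batchSize=1000`.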
+ */ +const DEFAULT_MAX_RESULT_WINDOW = 10000; + +/** + * Defines routes that are used for encryption key rotation. + */ +export function defineKeyRotationRoutes({ + encryptionKeyRotationService, + router, + logger, + config, +}: RouteDefinitionParams) { + let rotationInProgress = false; + router.post( + { + path: '/api/encrypted_saved_objects/_rotate_key', + validate: { + query: schema.object({ + batchSize: schema.number({ + min: 1, + max: DEFAULT_MAX_RESULT_WINDOW, + defaultValue: DEFAULT_MAX_RESULT_WINDOW, + }), + type: schema.maybe(schema.string()), + }), + }, + options: { + tags: ['access:rotateEncryptionKey'], + }, + }, + async (context, request, response) => { + if (config.keyRotation.decryptionOnlyKeys.length === 0) { + return response.badRequest({ + body: + 'Kibana is not configured to support encryption key rotation. Update `kibana.yml` to include `xpack.encryptedSavedObjects.keyRotation.decryptionOnlyKeys` to rotate your encryption keys.', + }); + } + + if (rotationInProgress) { + return response.customError({ + body: + 'Encryption key rotation is in progress already. Please wait until it is completed and try again.', + statusCode: 429, + }); + } + + rotationInProgress = true; + try { + return response.ok({ + body: await encryptionKeyRotationService.rotate(request, { + batchSize: request.query.batchSize, + type: request.query.type, + }), + }); + } catch (err) { + logger.error(err); + return response.customError({ body: err, statusCode: 500 }); + } finally { + rotationInProgress = false; + } + } + ); +} diff --git a/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.test.ts b/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.test.ts index 7ba90a5a76ab3..33ea1d8c3acec 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.test.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.test.ts @@ -66,5 +66,16 @@ describe('getDescriptorNamespace', () => { 'foo-namespace' ); }); + + it('returns the provided namespace if it is in array format', () => { + const mockBaseTypeRegistry = savedObjectsTypeRegistryMock.create(); + mockBaseTypeRegistry.isSingleNamespace.mockReturnValue(true); + mockBaseTypeRegistry.isMultiNamespace.mockReturnValue(false); + mockBaseTypeRegistry.isNamespaceAgnostic.mockReturnValue(false); + + expect(getDescriptorNamespace(mockBaseTypeRegistry, 'singletype', ['foo-namespace'])).toEqual( + 'foo-namespace' + ); + }); }); }); diff --git a/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.ts b/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.ts index 7201f13fb930b..7c237b82cbb15 100644 --- a/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.ts +++ b/x-pack/plugins/encrypted_saved_objects/server/saved_objects/get_descriptor_namespace.ts @@ -9,9 +9,13 @@ import { ISavedObjectTypeRegistry, SavedObjectsUtils } from '../../../../../src/ export const getDescriptorNamespace = ( typeRegistry: ISavedObjectTypeRegistry, type: string, - namespace?: string + namespace?: string | string[] ) => { - const descriptorNamespace = typeRegistry.isSingleNamespace(type) ? namespace : undefined; + const descriptorNamespace = typeRegistry.isSingleNamespace(type) + ? Array.isArray(namespace) + ? 
namespace[0] + : namespace + : undefined; return normalizeNamespace(descriptorNamespace); }; diff --git a/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts b/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts index 207d8a722a8c6..ea50ea6f11f3a 100644 --- a/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts +++ b/x-pack/plugins/infra/public/alerting/common/components/get_alert_preview.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { INFRA_ALERT_PREVIEW_PATH, METRIC_THRESHOLD_ALERT_TYPE_ID, @@ -22,7 +22,7 @@ export async function getAlertPreview({ params, alertType, }: { - fetch: HttpSetup['fetch']; + fetch: HttpHandler; params: AlertPreviewRequestParams; alertType: PreviewableAlertTypes; }): Promise { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts index 6fa2ac175ace6..4fdd6bdd282ba 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_cleanup.ts @@ -5,21 +5,25 @@ */ import * as rt from 'io-ts'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getDatafeedId, getJobId } from '../../../../../common/log_analysis'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface DeleteJobsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callDeleteJobs = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: DeleteJobsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time - const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { + const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', { method: 'POST', body: JSON.stringify( deleteJobsRequestPayloadRT.encode({ @@ -28,28 +32,29 @@ export const callDeleteJobs = async ( ), }); - return pipe( - deleteJobsResponsePayloadRT.decode(deleteJobsResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse); }; -export const callGetJobDeletionTasks = async () => { - const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); +export const callGetJobDeletionTasks = async (fetch: HttpHandler) => { + const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks'); - return pipe( - getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse); }; +interface StopDatafeedsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} + export const callStopDatafeeds = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: StopDatafeedsRequestArgs, + fetch: HttpHandler ) 
=> { + const { spaceId, sourceId, jobTypes } = requestArgs; + // Stop datafeed due to https://github.com/elastic/kibana/issues/44652 - const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { + const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', { method: 'POST', body: JSON.stringify( stopDatafeedsRequestPayloadRT.encode({ @@ -58,10 +63,7 @@ export const callStopDatafeeds = async ( ), }); - return pipe( - stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse); }; export const deleteJobsRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts index 7441c0ab7d34c..7cb477dbe5b37 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_jobs_summary_api.ts @@ -4,21 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobId, jobCustomSettingsRT } from '../../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callJobsSummaryAPI = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { + const { spaceId, sourceId, jobTypes } = requestArgs; + const response = await fetch('/api/ml/jobs/jobs_summary', { method: 'POST', body: JSON.stringify( fetchJobStatusRequestPayloadRT.encode({ @@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async ( }) ), }); - return pipe( - fetchJobStatusResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response); }; export const fetchJobStatusRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts index b6b40d6dc651f..2bf18d4e52c79 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_get_module.ts @@ -4,24 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { jobCustomSettingsRT } from '../../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callGetMlModuleAPI = async (moduleId: string) => { - const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { +export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => { + const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, { method: 'GET', }); - return pipe( - getMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getMlModuleResponsePayloadRT)(response); }; const jobDefinitionRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts index 7c8d63374924c..1f203ef9618b8 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/ml_setup_module_api.ts @@ -4,27 +4,38 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; - -export const callSetupMlModuleAPI = async ( - moduleId: string, - start: number | undefined, - end: number | undefined, - spaceId: string, - sourceId: string, - indexPattern: string, - jobOverrides: SetupMlModuleJobOverrides[] = [], - datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], - query?: object -) => { - const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, { +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + moduleId: string; + start?: number; + end?: number; + spaceId: string; + sourceId: string; + indexPattern: string; + jobOverrides?: SetupMlModuleJobOverrides[]; + datafeedOverrides?: SetupMlModuleDatafeedOverrides[]; + query?: object; +} + +export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern, + jobOverrides = [], + datafeedOverrides = [], + query, + } = requestArgs; + + const response = await fetch(`/api/ml/modules/setup/${moduleId}`, { method: 'POST', body: JSON.stringify( setupMlModuleRequestPayloadRT.encode({ @@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async ( ), }); - return pipe( - setupMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(setupMlModuleResponsePayloadRT)(response); }; const setupMlModuleTimeParamsRT = rt.partial({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts 
b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts index 6c9d5e439d359..ec08d3ac107e5 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_datasets.ts @@ -4,21 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ +import type { HttpHandler } from 'src/core/public'; import { LOG_ANALYSIS_VALIDATE_DATASETS_PATH, validateLogEntryDatasetsRequestPayloadRT, validateLogEntryDatasetsResponsePayloadRT, } from '../../../../../common/http_api'; import { decodeOrThrow } from '../../../../../common/runtime_types'; -import { npStart } from '../../../../legacy_singletons'; -export const callValidateDatasetsAPI = async ( - indices: string[], - timestampField: string, - startTime: number, - endTime: number -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, { +interface RequestArgs { + indices: string[]; + timestampField: string; + startTime: number; + endTime: number; +} + +export const callValidateDatasetsAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { indices, timestampField, startTime, endTime } = requestArgs; + const response = await fetch(LOG_ANALYSIS_VALIDATE_DATASETS_PATH, { method: 'POST', body: JSON.stringify( validateLogEntryDatasetsRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts index bbef7d201045f..465d09a744b19 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/api/validate_indices.ts @@ -4,10 +4,8 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; + import { LOG_ANALYSIS_VALIDATE_INDICES_PATH, ValidationIndicesFieldSpecification, @@ -15,19 +13,19 @@ import { validationIndicesResponsePayloadRT, } from '../../../../../common/http_api'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + indices: string[]; + fields: ValidationIndicesFieldSpecification[]; +} -export const callValidateIndicesAPI = async ( - indices: string[], - fields: ValidationIndicesFieldSpecification[] -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, { +export const callValidateIndicesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { indices, fields } = requestArgs; + const response = await fetch(LOG_ANALYSIS_VALIDATE_INDICES_PATH, { method: 'POST', body: JSON.stringify(validationIndicesRequestPayloadRT.encode({ data: { indices, fields } })), }); - return pipe( - validationIndicesResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(validationIndicesResponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx index 9116900ec2196..74b316f78259f 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_capabilities.tsx @@ -6,18 +6,16 @@ import createContainer from 'constate'; import { useMemo, useState, useEffect } from 'react'; -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; -import { npStart } from '../../../legacy_singletons'; import { getMlCapabilitiesResponsePayloadRT, GetMlCapabilitiesResponsePayload, } from './api/ml_api_types'; -import { throwErrors, createPlainError } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export const useLogAnalysisCapabilities = () => { + const { services } = useKibanaContextForPlugin(); const [mlCapabilities, setMlCapabilities] = useState( initialMlCapabilities ); @@ -26,12 +24,9 @@ export const useLogAnalysisCapabilities = () => { { cancelPreviousOn: 'resolution', createPromise: async () => { - const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); + const rawResponse = await services.http.fetch('/api/ml/ml_capabilities'); - return pipe( - getMlCapabilitiesResponsePayloadRT.decode(rawResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getMlCapabilitiesResponsePayloadRT)(rawResponse); }, onResolve: (response) => { setMlCapabilities(response); diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx index 522616f83d0cb..ec5e879131aa1 100644 --- 
a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_cleanup.tsx @@ -3,17 +3,18 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ - +import type { HttpHandler } from 'src/core/public'; import { getJobId } from '../../../../common/log_analysis'; import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; export const cleanUpJobsAndDatafeeds = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { try { - await callStopDatafeeds(spaceId, sourceId, jobTypes); + await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch); } catch (err) { // Proceed only if datafeed has been deleted or didn't exist in the first place if (err?.res?.status !== 404) { @@ -21,27 +22,29 @@ export const cleanUpJobsAndDatafeeds = async ( } } - return await deleteJobs(spaceId, sourceId, jobTypes); + return await deleteJobs(spaceId, sourceId, jobTypes, fetch); }; const deleteJobs = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { - const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); - await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); + const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch); + await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch); return deleteJobsResponse; }; const waitUntilJobsAreDeleted = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); while (true) { - const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); + const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch); const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); if (needToWait) { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx index 79768302a7310..27ef0039ae49f 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo } from 'react'; import { DatasetFilter } from '../../../../common/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { useModuleStatus } from './log_analysis_module_status'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; @@ -17,6 +18,7 @@ export const useLogAnalysisModule = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const { spaceId, sourceId, timestampField } = sourceConfiguration; const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); @@ -25,7 +27,7 @@ export const useLogAnalysisModule = ({ cancelPreviousOn: 'resolution', createPromise: async () => { dispatchModuleStatus({ type: 'fetchingJobStatuses' }); - return await moduleDescriptor.getJobSummary(spaceId, sourceId); + 
return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch); }, onResolve: (jobResponse) => { dispatchModuleStatus({ @@ -52,13 +54,23 @@ export const useLogAnalysisModule = ({ datasetFilter: DatasetFilter ) => { dispatchModuleStatus({ type: 'startedSetup' }); - const setupResult = await moduleDescriptor.setUpModule(start, end, datasetFilter, { - indices: selectedIndices, - sourceId, + const setupResult = await moduleDescriptor.setUpModule( + start, + end, + datasetFilter, + { + indices: selectedIndices, + sourceId, + spaceId, + timestampField, + }, + services.http.fetch + ); + const jobSummaries = await moduleDescriptor.getJobSummary( spaceId, - timestampField, - }); - const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId); + sourceId, + services.http.fetch + ); return { setupResult, jobSummaries }; }, onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { @@ -82,7 +94,7 @@ export const useLogAnalysisModule = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.cleanUpModule(spaceId, sourceId); + return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch); }, }, [spaceId, sourceId] diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx index 1f643d0e5eb34..7a5c1d354dc34 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_definition.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo, useState } from 'react'; import { getJobId } from '../../../../common/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { JobSummary } from './api/ml_get_jobs_summary_api'; import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; @@ -18,6 +19,7 @@ export const useLogAnalysisModuleDefinition = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const [moduleDefinition, setModuleDefinition] = useState< GetMlModuleResponsePayload | undefined >(); @@ -40,7 +42,7 @@ export const useLogAnalysisModuleDefinition = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.getModuleDefinition(); + return await moduleDescriptor.getModuleDefinition(services.http.fetch); }, onResolve: (response) => { setModuleDefinition(response); diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts index ba355ad195b11..c42704860b032 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_module_types.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import type { HttpHandler } from 'src/core/public'; import { ValidateLogEntryDatasetsResponsePayload, ValidationIndicesResponsePayload, @@ -23,24 +24,35 @@ export interface ModuleDescriptor { jobTypes: JobType[]; bucketSpan: number; getJobIds: (spaceId: string, sourceId: string) => Record; - getJobSummary: (spaceId: string, sourceId: string) => Promise; - getModuleDefinition: () => Promise; + getJobSummary: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; + getModuleDefinition: (fetch: HttpHandler) => Promise; setUpModule: ( start: number | undefined, end: number | undefined, datasetFilter: DatasetFilter, - sourceConfiguration: ModuleSourceConfiguration + sourceConfiguration: ModuleSourceConfiguration, + fetch: HttpHandler ) => Promise; - cleanUpModule: (spaceId: string, sourceId: string) => Promise; + cleanUpModule: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; validateSetupIndices: ( indices: string[], - timestampField: string + timestampField: string, + fetch: HttpHandler ) => Promise; validateSetupDatasets: ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => Promise; } diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts index e6fe8f4e92cc4..750a7104a3a98 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/log_analysis_setup_state.ts @@ -18,6 +18,7 @@ import { ValidationIndicesError, ValidationUIError, } from '../../../components/logging/log_analysis_setup/initial_configuration_step'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { ModuleDescriptor, ModuleSourceConfiguration } from './log_analysis_module_types'; @@ -43,6 +44,7 @@ export const useAnalysisSetupState = ({ setUpModule, sourceConfiguration, }: AnalysisSetupStateArguments) => { + const { services } = useKibanaContextForPlugin(); const [startTime, setStartTime] = useState(Date.now() - fourWeeksInMs); const [endTime, setEndTime] = useState(undefined); @@ -158,7 +160,8 @@ export const useAnalysisSetupState = ({ createPromise: async () => { return await validateSetupIndices( sourceConfiguration.indices, - sourceConfiguration.timestampField + sourceConfiguration.timestampField, + services.http.fetch ); }, onResolve: ({ data: { errors } }) => { @@ -183,7 +186,8 @@ export const useAnalysisSetupState = ({ validIndexNames, sourceConfiguration.timestampField, startTime ?? 0, - endTime ?? Date.now() + endTime ?? 
Date.now(), + services.http.fetch ); }, onResolve: ({ data: { datasets } }) => { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts index 9682b3e74db3b..46b28e091cc5c 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_categories/module_descriptor.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; +import type { HttpHandler } from 'src/core/public'; import { bucketSpan, categoriesMessageField, @@ -42,22 +43,26 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryCategoriesJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: logEntryCategoriesJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await callGetMlModuleAPI(moduleId, fetch); }; const setUpModule = async ( start: number | undefined, end: number | undefined, datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration + { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, + fetch: HttpHandler ) => { const indexNamePattern = indices.join(','); const jobOverrides = [ @@ -101,46 +106,59 @@ const setUpModule = async ( }; return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - [], - query + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + query, + }, + fetch ); }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryCategoriesJobTypes, fetch); }; -const validateSetupIndices = async (indices: string[], timestampField: string) => { - return await callValidateIndicesAPI(indices, [ - { - name: timestampField, - validTypes: ['date'], - }, - { - name: partitionField, - validTypes: ['keyword'], - }, +const validateSetupIndices = async ( + indices: string[], + timestampField: string, + fetch: HttpHandler +) => { + return await callValidateIndicesAPI( { - name: categoriesMessageField, - validTypes: ['text'], + indices, + fields: [ + { + name: timestampField, + validTypes: ['date'], + }, + { + name: partitionField, + validTypes: ['keyword'], + }, + { + name: categoriesMessageField, + validTypes: ['text'], + }, + ], }, - ]); + fetch + ); }; const validateSetupDatasets = async ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => { - return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); + return await callValidateDatasetsAPI({ indices, 
timestampField, startTime, endTime }, fetch); }; export const logEntryCategoriesModule: ModuleDescriptor = { diff --git a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts index 001174a2b7558..b97ec55105f5d 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_analysis/modules/log_entry_rate/module_descriptor.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; +import type { HttpHandler } from 'src/core/public'; import { bucketSpan, DatasetFilter, @@ -41,22 +42,26 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, logEntryRateJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: logEntryRateJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await callGetMlModuleAPI(moduleId, fetch); }; const setUpModule = async ( start: number | undefined, end: number | undefined, datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration + { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, + fetch: HttpHandler ) => { const indexNamePattern = indices.join(','); const jobOverrides = [ @@ -93,42 +98,55 @@ const setUpModule = async ( : undefined; return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - [], - query + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + query, + }, + fetch ); }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, logEntryRateJobTypes, fetch); }; -const validateSetupIndices = async (indices: string[], timestampField: string) => { - return await callValidateIndicesAPI(indices, [ - { - name: timestampField, - validTypes: ['date'], - }, +const validateSetupIndices = async ( + indices: string[], + timestampField: string, + fetch: HttpHandler +) => { + return await callValidateIndicesAPI( { - name: partitionField, - validTypes: ['keyword'], + indices, + fields: [ + { + name: timestampField, + validTypes: ['date'], + }, + { + name: partitionField, + validTypes: ['keyword'], + }, + ], }, - ]); + fetch + ); }; const validateSetupDatasets = async ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => { - return await callValidateDatasetsAPI(indices, timestampField, startTime, endTime); + return await callValidateDatasetsAPI({ indices, timestampField, startTime, endTime }, fetch); }; export const logEntryRateModule: ModuleDescriptor = { diff --git 
a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts index 2a19a82892427..3bbd86cb0ef75 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries.ts @@ -4,12 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_PATH, @@ -18,11 +15,11 @@ import { logEntriesResponseRT, } from '../../../../../common/http_api'; -export const fetchLogEntries = async (requestArgs: LogEntriesRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_PATH, { +export const fetchLogEntries = async (requestArgs: LogEntriesRequest, fetch: HttpHandler) => { + const response = await fetch(LOG_ENTRIES_PATH, { method: 'POST', body: JSON.stringify(logEntriesRequestRT.encode(requestArgs)), }); - return pipe(logEntriesResponseRT.decode(response), fold(throwErrors(createPlainError), identity)); + return decodeOrThrow(logEntriesResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts index 5fde01e458e36..d459fba6cf957 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_entries/api/fetch_log_entries_item.ts @@ -4,12 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_ITEM_PATH, @@ -18,14 +15,14 @@ import { logEntriesItemResponseRT, } from '../../../../../common/http_api'; -export const fetchLogEntriesItem = async (requestArgs: LogEntriesItemRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_ITEM_PATH, { +export const fetchLogEntriesItem = async ( + requestArgs: LogEntriesItemRequest, + fetch: HttpHandler +) => { + const response = await fetch(LOG_ENTRIES_ITEM_PATH, { method: 'POST', body: JSON.stringify(logEntriesItemRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesItemResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesItemResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts b/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts index d5b2a0aaa61c0..4c8c610794b2e 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_entries/index.ts @@ -14,6 +14,7 @@ import { LogEntriesBaseRequest, } from '../../../../common/http_api'; import { fetchLogEntries } from './api/fetch_log_entries'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; const DESIRED_BUFFER_PAGES = 2; const LIVE_STREAM_INTERVAL = 5000; @@ -144,6 +145,7 @@ const useFetchEntriesEffect = ( dispatch: Dispatch, props: LogEntriesProps ) => { + const { services } = useKibanaContextForPlugin(); const [prevParams, cachePrevParams] = useState(); const [startedStreaming, setStartedStreaming] = useState(false); @@ -172,7 +174,7 @@ const useFetchEntriesEffect = ( before: 'last', }; - const { data: payload } = await fetchLogEntries(fetchArgs); + const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch); dispatch({ type: Action.ReceiveNewEntries, payload }); // Move position to the bottom if it's the first load. @@ -228,7 +230,7 @@ const useFetchEntriesEffect = ( after: state.bottomCursor, }; - const { data: payload } = await fetchLogEntries(fetchArgs); + const { data: payload } = await fetchLogEntries(fetchArgs, services.http.fetch); dispatch({ type: getEntriesBefore ? 
Action.ReceiveEntriesBefore : Action.ReceiveEntriesAfter, diff --git a/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx b/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx index 0489892e58f2a..9ed2f5ad175c7 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_flyout.tsx @@ -9,6 +9,7 @@ import { isString } from 'lodash'; import React, { useContext, useEffect, useMemo, useState } from 'react'; import { LogEntriesItem } from '../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; import { UrlStateContainer } from '../../utils/url_state'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { fetchLogEntriesItem } from './log_entries/api/fetch_log_entries_item'; @@ -26,6 +27,7 @@ export interface FlyoutOptionsUrlState { } export const useLogFlyout = () => { + const { services } = useKibanaContextForPlugin(); const { sourceId } = useLogSourceContext(); const [flyoutVisible, setFlyoutVisibility] = useState(false); const [flyoutId, setFlyoutId] = useState(null); @@ -39,7 +41,7 @@ export const useLogFlyout = () => { if (!flyoutId) { return; } - return await fetchLogEntriesItem({ sourceId, id: flyoutId }); + return await fetchLogEntriesItem({ sourceId, id: flyoutId }, services.http.fetch); }, onResolve: (response) => { if (response) { diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts index 030a9d180c7b5..25865a30467f5 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_entries_highlights.ts @@ -4,12 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_HIGHLIGHTS_PATH, @@ -18,14 +15,14 @@ import { logEntriesHighlightsResponseRT, } from '../../../../../common/http_api'; -export const fetchLogEntriesHighlights = async (requestArgs: LogEntriesHighlightsRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, { +export const fetchLogEntriesHighlights = async ( + requestArgs: LogEntriesHighlightsRequest, + fetch: HttpHandler +) => { + const response = await fetch(LOG_ENTRIES_HIGHLIGHTS_PATH, { method: 'POST', body: JSON.stringify(logEntriesHighlightsRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesHighlightsResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesHighlightsResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts index bda8f535549c7..1cf95bc08a521 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/api/fetch_log_summary_highlights.ts @@ -3,11 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; + +import type { HttpHandler } from 'src/core/public'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, @@ -17,15 +15,13 @@ import { } from '../../../../../common/http_api'; export const fetchLogSummaryHighlights = async ( - requestArgs: LogEntriesSummaryHighlightsRequest + requestArgs: LogEntriesSummaryHighlightsRequest, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, { + const response = await fetch(LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH, { method: 'POST', body: JSON.stringify(logEntriesSummaryHighlightsRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesSummaryHighlightsResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesSummaryHighlightsResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx index dbeb8c71c11eb..b4edebe8f8207 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_entry_highlights.tsx @@ -10,6 +10,7 @@ import { TimeKey } from '../../../../common/time'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { fetchLogEntriesHighlights } from './api/fetch_log_entries_highlights'; import { LogEntry, LogEntriesHighlightsResponse } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export const useLogEntryHighlights = ( sourceId: string, @@ -21,6 +22,7 @@ export const useLogEntryHighlights = ( filterQuery: string | null, highlightTerms: string[] ) => { + const { services } = useKibanaContextForPlugin(); const [logEntryHighlights, setLogEntryHighlights] = useState< LogEntriesHighlightsResponse['data'] >([]); @@ -32,15 +34,18 @@ export const useLogEntryHighlights = ( throw new Error('Skipping request: Insufficient parameters'); } - return await fetchLogEntriesHighlights({ - sourceId, - startTimestamp, - endTimestamp, - center: centerPoint, - size, - query: filterQuery || undefined, - highlightTerms, - }); + return await fetchLogEntriesHighlights( + { + sourceId, + startTimestamp, + endTimestamp, + center: centerPoint, + size, + query: filterQuery || undefined, + highlightTerms, + }, + services.http.fetch + ); }, onResolve: (response) => { setLogEntryHighlights(response.data); diff --git a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts index 6d982ee004ccc..14366891dbf59 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_highlights/log_summary_highlights.ts @@ -11,6 +11,7 @@ import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights'; import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api'; import { useBucketSize } from 
'../log_summary/bucket_size'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export const useLogSummaryHighlights = ( sourceId: string, @@ -20,6 +21,7 @@ export const useLogSummaryHighlights = ( filterQuery: string | null, highlightTerms: string[] ) => { + const { services } = useKibanaContextForPlugin(); const [logSummaryHighlights, setLogSummaryHighlights] = useState< LogEntriesSummaryHighlightsResponse['data'] >([]); @@ -34,14 +36,17 @@ export const useLogSummaryHighlights = ( throw new Error('Skipping request: Insufficient parameters'); } - return await fetchLogSummaryHighlights({ - sourceId, - startTimestamp, - endTimestamp, - bucketSize, - query: filterQuery, - highlightTerms, - }); + return await fetchLogSummaryHighlights( + { + sourceId, + startTimestamp, + endTimestamp, + bucketSize, + query: filterQuery, + highlightTerms, + }, + services.http.fetch + ); }, onResolve: (response) => { setLogSummaryHighlights(response.data); diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts index e847302a6d367..c9ced069473a3 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_configuration.ts @@ -4,17 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { getLogSourceConfigurationPath, getLogSourceConfigurationSuccessResponsePayloadRT, } from '../../../../../common/http_api/log_sources'; import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callFetchLogSourceConfigurationAPI = async ( - sourceId: string, - fetch: HttpSetup['fetch'] -) => { +export const callFetchLogSourceConfigurationAPI = async (sourceId: string, fetch: HttpHandler) => { const response = await fetch(getLogSourceConfigurationPath(sourceId), { method: 'GET', }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts index 20e67a0a59c9f..5bc409115e595 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/api/fetch_log_source_status.ts @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { getLogSourceStatusPath, getLogSourceStatusSuccessResponsePayloadRT, } from '../../../../../common/http_api/log_sources'; import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpSetup['fetch']) => { +export const callFetchLogSourceStatusAPI = async (sourceId: string, fetch: HttpHandler) => { const response = await fetch(getLogSourceStatusPath(sourceId), { method: 'GET', }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts b/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts index 4361e4bef827f..33212c5d3b0f2 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/api/patch_log_source_configuration.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { getLogSourceConfigurationPath, patchLogSourceConfigurationSuccessResponsePayloadRT, @@ -16,7 +16,7 @@ import { decodeOrThrow } from '../../../../../common/runtime_types'; export const callPatchLogSourceConfigurationAPI = async ( sourceId: string, patchedProperties: LogSourceConfigurationPropertiesPatch, - fetch: HttpSetup['fetch'] + fetch: HttpHandler ) => { const response = await fetch(getLogSourceConfigurationPath(sourceId), { method: 'PATCH', diff --git a/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts b/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts index 51b32a4c4eacf..e2dd4c523c03f 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_source/log_source.ts @@ -7,7 +7,7 @@ import createContainer from 'constate'; import { useCallback, useMemo, useState } from 'react'; import { useMountedState } from 'react-use'; -import { HttpSetup } from 'src/core/public'; +import type { HttpHandler } from 'src/core/public'; import { LogSourceConfiguration, LogSourceConfigurationProperties, @@ -26,13 +26,7 @@ export { LogSourceStatus, }; -export const useLogSource = ({ - sourceId, - fetch, -}: { - sourceId: string; - fetch: HttpSetup['fetch']; -}) => { +export const useLogSource = ({ sourceId, fetch }: { sourceId: string; fetch: HttpHandler }) => { const getIsMounted = useMountedState(); const [sourceConfiguration, setSourceConfiguration] = useState< LogSourceConfiguration | undefined diff --git a/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts b/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts index b414408512db2..4a6da6063e960 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_stream/index.ts @@ -9,6 +9,7 @@ import { esKuery } from '../../../../../../../src/plugins/data/public'; import { fetchLogEntries } from '../log_entries/api/fetch_log_entries'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { LogEntry, LogEntriesCursor } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; interface LogStreamProps { sourceId: string; @@ -31,6 +32,7 @@ export function useLogStream({ query, center, 
}: LogStreamProps): LogStreamState { + const { services } = useKibanaContextForPlugin(); const [entries, setEntries] = useState([]); const parsedQuery = useMemo(() => { @@ -47,13 +49,16 @@ export function useLogStream({ setEntries([]); const fetchPosition = center ? { center } : { before: 'last' }; - return fetchLogEntries({ - sourceId, - startTimestamp, - endTimestamp, - query: parsedQuery, - ...fetchPosition, - }); + return fetchLogEntries( + { + sourceId, + startTimestamp, + endTimestamp, + query: parsedQuery, + ...fetchPosition, + }, + services.http.fetch + ); }, onResolve: ({ data }) => { setEntries(data.entries); diff --git a/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts b/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts index f74f0dc0e3117..2be6538e21ebe 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts +++ b/x-pack/plugins/infra/public/containers/logs/log_summary/api/fetch_log_summary.ts @@ -4,11 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; -import { throwErrors, createPlainError } from '../../../../../common/runtime_types'; +import type { HttpHandler } from 'src/core/public'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; import { LOG_ENTRIES_SUMMARY_PATH, @@ -17,14 +14,14 @@ import { logEntriesSummaryResponseRT, } from '../../../../../common/http_api'; -export const fetchLogSummary = async (requestArgs: LogEntriesSummaryRequest) => { - const response = await npStart.http.fetch(LOG_ENTRIES_SUMMARY_PATH, { +export const fetchLogSummary = async ( + requestArgs: LogEntriesSummaryRequest, + fetch: HttpHandler +) => { + const response = await fetch(LOG_ENTRIES_SUMMARY_PATH, { method: 'POST', body: JSON.stringify(logEntriesSummaryRequestRT.encode(requestArgs)), }); - return pipe( - logEntriesSummaryResponseRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(logEntriesSummaryResponseRT)(response); }; diff --git a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx index 73d0e5efdf06b..652ea8c71dc44 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.test.tsx @@ -5,6 +5,8 @@ */ import { renderHook } from '@testing-library/react-hooks'; +// We are using this inside a `jest.mock` call. 
Jest requires dynamic dependencies to be prefixed with `mock` +import { coreMock as mockCoreMock } from 'src/core/public/mocks'; import { useLogSummary } from './log_summary'; @@ -16,6 +18,10 @@ import { datemathToEpochMillis } from '../../../utils/datemath'; jest.mock('./api/fetch_log_summary', () => ({ fetchLogSummary: jest.fn() })); const fetchLogSummaryMock = fetchLogSummary as jest.MockedFunction; +jest.mock('../../../hooks/use_kibana', () => ({ + useKibanaContextForPlugin: () => ({ services: mockCoreMock.createStart() }), +})); + describe('useLogSummary hook', () => { beforeEach(() => { fetchLogSummaryMock.mockClear(); @@ -53,7 +59,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ sourceId: 'INITIAL_SOURCE_ID', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(firstMockResponse.data.buckets); @@ -64,7 +71,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ sourceId: 'CHANGED_SOURCE_ID', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(secondMockResponse.data.buckets); }); @@ -96,7 +104,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ query: 'INITIAL_FILTER_QUERY', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(firstMockResponse.data.buckets); @@ -107,7 +116,8 @@ describe('useLogSummary hook', () => { expect(fetchLogSummaryMock).toHaveBeenLastCalledWith( expect.objectContaining({ query: 'CHANGED_FILTER_QUERY', - }) + }), + expect.anything() ); expect(result.current.buckets).toEqual(secondMockResponse.data.buckets); }); @@ -132,7 +142,8 @@ describe('useLogSummary hook', () => { expect.objectContaining({ startTimestamp: firstRange.startTimestamp, endTimestamp: firstRange.endTimestamp, - }) + }), + expect.anything() ); const secondRange = createMockDateRange('now-20s', 'now'); @@ -145,7 +156,8 @@ describe('useLogSummary hook', () => { expect.objectContaining({ startTimestamp: secondRange.startTimestamp, endTimestamp: secondRange.endTimestamp, - }) + }), + expect.anything() ); }); }); diff --git a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx index b83be77656863..be0d87f5d267d 100644 --- a/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx +++ b/x-pack/plugins/infra/public/containers/logs/log_summary/log_summary.tsx @@ -10,6 +10,7 @@ import { useCancellableEffect } from '../../../utils/cancellable_effect'; import { fetchLogSummary } from './api/fetch_log_summary'; import { LogEntriesSummaryResponse } from '../../../../common/http_api'; import { useBucketSize } from './bucket_size'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets']; @@ -19,6 +20,7 @@ export const useLogSummary = ( endTimestamp: number | null, filterQuery: string | null ) => { + const { services } = useKibanaContextForPlugin(); const [logSummaryBuckets, setLogSummaryBuckets] = useState([]); const bucketSize = useBucketSize(startTimestamp, endTimestamp); @@ -28,13 +30,16 @@ export const useLogSummary = ( return; } - fetchLogSummary({ - sourceId, - startTimestamp, - endTimestamp, - bucketSize, - query: filterQuery, - }).then((response) => { + fetchLogSummary( + { + sourceId, + startTimestamp, + endTimestamp, + bucketSize, 
+ query: filterQuery, + }, + services.http.fetch + ).then((response) => { if (!getIsCancelled()) { setLogSummaryBuckets(response.data.buckets); } diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts index 23fa338e74f14..fa7d8f14c6a9a 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_cleanup.ts @@ -5,21 +5,24 @@ */ import * as rt from 'io-ts'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../legacy_singletons'; - +import type { HttpHandler } from 'src/core/public'; import { getDatafeedId, getJobId } from '../../../../common/infra_ml'; -import { throwErrors, createPlainError } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; + +interface DeleteJobsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callDeleteJobs = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: DeleteJobsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // NOTE: Deleting the jobs via this API will delete the datafeeds at the same time - const deleteJobsResponse = await npStart.http.fetch('/api/ml/jobs/delete_jobs', { + const deleteJobsResponse = await fetch('/api/ml/jobs/delete_jobs', { method: 'POST', body: JSON.stringify( deleteJobsRequestPayloadRT.encode({ @@ -28,28 +31,29 @@ export const callDeleteJobs = async ( ), }); - return pipe( - deleteJobsResponsePayloadRT.decode(deleteJobsResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(deleteJobsResponsePayloadRT)(deleteJobsResponse); }; -export const callGetJobDeletionTasks = async () => { - const jobDeletionTasksResponse = await npStart.http.fetch('/api/ml/jobs/deleting_jobs_tasks'); +export const callGetJobDeletionTasks = async (fetch: HttpHandler) => { + const jobDeletionTasksResponse = await fetch('/api/ml/jobs/deleting_jobs_tasks'); - return pipe( - getJobDeletionTasksResponsePayloadRT.decode(jobDeletionTasksResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getJobDeletionTasksResponsePayloadRT)(jobDeletionTasksResponse); }; +interface StopDatafeedsRequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} + export const callStopDatafeeds = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: StopDatafeedsRequestArgs, + fetch: HttpHandler ) => { + const { spaceId, sourceId, jobTypes } = requestArgs; + // Stop datafeed due to https://github.com/elastic/kibana/issues/44652 - const stopDatafeedResponse = await npStart.http.fetch('/api/ml/jobs/stop_datafeeds', { + const stopDatafeedResponse = await fetch('/api/ml/jobs/stop_datafeeds', { method: 'POST', body: JSON.stringify( stopDatafeedsRequestPayloadRT.encode({ @@ -58,10 +62,7 @@ export const callStopDatafeeds = async ( ), }); - return pipe( - stopDatafeedsResponsePayloadRT.decode(stopDatafeedResponse), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(stopDatafeedsResponsePayloadRT)(stopDatafeedResponse); }; export const deleteJobsRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts 
index 3fddb63f69791..84b5df3d172c7 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_get_jobs_summary_api.ts @@ -4,21 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobId, jobCustomSettingsRT } from '../../../../common/infra_ml'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; + +interface RequestArgs { + spaceId: string; + sourceId: string; + jobTypes: JobType[]; +} export const callJobsSummaryAPI = async ( - spaceId: string, - sourceId: string, - jobTypes: JobType[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch('/api/ml/jobs/jobs_summary', { + const { spaceId, sourceId, jobTypes } = requestArgs; + const response = await fetch('/api/ml/jobs/jobs_summary', { method: 'POST', body: JSON.stringify( fetchJobStatusRequestPayloadRT.encode({ @@ -26,10 +29,7 @@ export const callJobsSummaryAPI = async ( }) ), }); - return pipe( - fetchJobStatusResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(fetchJobStatusResponsePayloadRT)(response); }; export const fetchJobStatusRequestPayloadRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts index d492522c120a1..75ce335fbe49c 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_get_module.ts @@ -4,24 +4,18 @@ * you may not use this file except in compliance with the Elastic License. */ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { jobCustomSettingsRT } from '../../../../common/log_analysis'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../common/runtime_types'; -export const callGetMlModuleAPI = async (moduleId: string) => { - const response = await npStart.http.fetch(`/api/ml/modules/get_module/${moduleId}`, { +export const callGetMlModuleAPI = async (moduleId: string, fetch: HttpHandler) => { + const response = await fetch(`/api/ml/modules/get_module/${moduleId}`, { method: 'GET', }); - return pipe( - getMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getMlModuleResponsePayloadRT)(response); }; const jobDefinitionRT = rt.type({ diff --git a/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts b/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts index 06b0e075387b0..36dced1bd2680 100644 --- a/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts +++ b/x-pack/plugins/infra/public/containers/ml/api/ml_setup_module_api.ts @@ -4,27 +4,38 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { identity } from 'fp-ts/lib/function'; -import { pipe } from 'fp-ts/lib/pipeable'; import * as rt from 'io-ts'; -import { npStart } from '../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getJobIdPrefix, jobCustomSettingsRT } from '../../../../common/infra_ml'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; - -export const callSetupMlModuleAPI = async ( - moduleId: string, - start: number | undefined, - end: number | undefined, - spaceId: string, - sourceId: string, - indexPattern: string, - jobOverrides: SetupMlModuleJobOverrides[] = [], - datafeedOverrides: SetupMlModuleDatafeedOverrides[] = [], - query?: object -) => { - const response = await npStart.http.fetch(`/api/ml/modules/setup/${moduleId}`, { +import { decodeOrThrow } from '../../../../common/runtime_types'; + +interface RequestArgs { + moduleId: string; + start?: number; + end?: number; + spaceId: string; + sourceId: string; + indexPattern: string; + jobOverrides?: SetupMlModuleJobOverrides[]; + datafeedOverrides?: SetupMlModuleDatafeedOverrides[]; + query?: object; +} + +export const callSetupMlModuleAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern, + jobOverrides = [], + datafeedOverrides = [], + query, + } = requestArgs; + + const response = await fetch(`/api/ml/modules/setup/${moduleId}`, { method: 'POST', body: JSON.stringify( setupMlModuleRequestPayloadRT.encode({ @@ -40,10 +51,7 @@ export const callSetupMlModuleAPI = async ( ), }); - return pipe( - setupMlModuleResponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(setupMlModuleResponsePayloadRT)(response); }; const setupMlModuleTimeParamsRT = rt.partial({ diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx index f4c90a459af6a..bc488a51e2aff 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_capabilities.tsx @@ -10,14 +10,15 @@ import { fold } from 'fp-ts/lib/Either'; import { pipe } from 'fp-ts/lib/pipeable'; import { identity } from 'fp-ts/lib/function'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; -import { npStart } from '../../legacy_singletons'; import { getMlCapabilitiesResponsePayloadRT, GetMlCapabilitiesResponsePayload, } from './api/ml_api_types'; import { throwErrors, createPlainError } from '../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; export const useInfraMLCapabilities = () => { + const { services } = useKibanaContextForPlugin(); const [mlCapabilities, setMlCapabilities] = useState( initialMlCapabilities ); @@ -26,7 +27,7 @@ export const useInfraMLCapabilities = () => { { cancelPreviousOn: 'resolution', createPromise: async () => { - const rawResponse = await npStart.http.fetch('/api/ml/ml_capabilities'); + const rawResponse = await services.http.fetch('/api/ml/ml_capabilities'); return pipe( getMlCapabilitiesResponsePayloadRT.decode(rawResponse), diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx index 736982c8043b1..871e61ecfe507 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx +++ 
b/x-pack/plugins/infra/public/containers/ml/infra_ml_cleanup.tsx @@ -4,16 +4,18 @@ * you may not use this file except in compliance with the Elastic License. */ +import { HttpHandler } from 'src/core/public'; import { getJobId } from '../../../common/infra_ml'; import { callDeleteJobs, callGetJobDeletionTasks, callStopDatafeeds } from './api/ml_cleanup'; export const cleanUpJobsAndDatafeeds = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { try { - await callStopDatafeeds(spaceId, sourceId, jobTypes); + await callStopDatafeeds({ spaceId, sourceId, jobTypes }, fetch); } catch (err) { // Proceed only if datafeed has been deleted or didn't exist in the first place if (err?.res?.status !== 404) { @@ -21,27 +23,29 @@ export const cleanUpJobsAndDatafeeds = async ( } } - return await deleteJobs(spaceId, sourceId, jobTypes); + return await deleteJobs(spaceId, sourceId, jobTypes, fetch); }; const deleteJobs = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { - const deleteJobsResponse = await callDeleteJobs(spaceId, sourceId, jobTypes); - await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes); + const deleteJobsResponse = await callDeleteJobs({ spaceId, sourceId, jobTypes }, fetch); + await waitUntilJobsAreDeleted(spaceId, sourceId, jobTypes, fetch); return deleteJobsResponse; }; const waitUntilJobsAreDeleted = async ( spaceId: string, sourceId: string, - jobTypes: JobType[] + jobTypes: JobType[], + fetch: HttpHandler ) => { const moduleJobIds = jobTypes.map((jobType) => getJobId(spaceId, sourceId, jobType)); while (true) { - const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(); + const { jobIds: jobIdsBeingDeleted } = await callGetJobDeletionTasks(fetch); const needToWait = jobIdsBeingDeleted.some((jobId) => moduleJobIds.includes(jobId)); if (needToWait) { diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx index 349541d108f5e..5408084a5246e 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo } from 'react'; import { DatasetFilter } from '../../../common/infra_ml'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { useModuleStatus } from './infra_ml_module_status'; import { ModuleDescriptor, ModuleSourceConfiguration } from './infra_ml_module_types'; @@ -17,6 +18,7 @@ export const useInfraMLModule = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const { spaceId, sourceId, timestampField } = sourceConfiguration; const [moduleStatus, dispatchModuleStatus] = useModuleStatus(moduleDescriptor.jobTypes); @@ -25,7 +27,7 @@ export const useInfraMLModule = ({ cancelPreviousOn: 'resolution', createPromise: async () => { dispatchModuleStatus({ type: 'fetchingJobStatuses' }); - return await moduleDescriptor.getJobSummary(spaceId, sourceId); + return await moduleDescriptor.getJobSummary(spaceId, sourceId, services.http.fetch); }, onResolve: (jobResponse) => { dispatchModuleStatus({ @@ -54,18 +56,25 @@ export const useInfraMLModule = ({ ) => { dispatchModuleStatus({ type: 'startedSetup' }); const setupResult = await moduleDescriptor.setUpModule( - 
start, - end, - datasetFilter, { - indices: selectedIndices, - sourceId, - spaceId, - timestampField, + start, + end, + datasetFilter, + moduleSourceConfiguration: { + indices: selectedIndices, + sourceId, + spaceId, + timestampField, + }, + partitionField, }, - partitionField + services.http.fetch + ); + const jobSummaries = await moduleDescriptor.getJobSummary( + spaceId, + sourceId, + services.http.fetch ); - const jobSummaries = await moduleDescriptor.getJobSummary(spaceId, sourceId); return { setupResult, jobSummaries }; }, onResolve: ({ setupResult: { datafeeds, jobs }, jobSummaries }) => { @@ -89,7 +98,7 @@ export const useInfraMLModule = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.cleanUpModule(spaceId, sourceId); + return await moduleDescriptor.cleanUpModule(spaceId, sourceId, services.http.fetch); }, }, [spaceId, sourceId] diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx index 3c7ffcfd4a4e2..a747a2853d1f7 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_definition.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo, useState } from 'react'; import { getJobId } from '../../../common/log_analysis'; +import { useKibanaContextForPlugin } from '../../hooks/use_kibana'; import { useTrackedPromise } from '../../utils/use_tracked_promise'; import { JobSummary } from './api/ml_get_jobs_summary_api'; import { GetMlModuleResponsePayload, JobDefinition } from './api/ml_get_module'; @@ -18,6 +19,7 @@ export const useInfraMLModuleDefinition = ({ sourceConfiguration: ModuleSourceConfiguration; moduleDescriptor: ModuleDescriptor; }) => { + const { services } = useKibanaContextForPlugin(); const [moduleDefinition, setModuleDefinition] = useState< GetMlModuleResponsePayload | undefined >(); @@ -40,7 +42,7 @@ export const useInfraMLModuleDefinition = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await moduleDescriptor.getModuleDefinition(); + return await moduleDescriptor.getModuleDefinition(services.http.fetch); }, onResolve: (response) => { setModuleDefinition(response); diff --git a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts index e36f38add641a..976a64e8034bc 100644 --- a/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts +++ b/x-pack/plugins/infra/public/containers/ml/infra_ml_module_types.ts @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ - +import { HttpHandler } from 'src/core/public'; import { ValidateLogEntryDatasetsResponsePayload, ValidationIndicesResponsePayload, @@ -16,6 +16,14 @@ import { SetupMlModuleResponsePayload } from './api/ml_setup_module_api'; export { JobModelSizeStats, JobSummary } from './api/ml_get_jobs_summary_api'; +export interface SetUpModuleArgs { + start?: number | undefined; + end?: number | undefined; + datasetFilter?: DatasetFilter; + moduleSourceConfiguration: ModuleSourceConfiguration; + partitionField?: string; +} + export interface ModuleDescriptor { moduleId: string; moduleName: string; @@ -23,25 +31,32 @@ export interface ModuleDescriptor { jobTypes: JobType[]; bucketSpan: number; getJobIds: (spaceId: string, sourceId: string) => Record; - getJobSummary: (spaceId: string, sourceId: string) => Promise; - getModuleDefinition: () => Promise; + getJobSummary: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; + getModuleDefinition: (fetch: HttpHandler) => Promise; setUpModule: ( - start: number | undefined, - end: number | undefined, - datasetFilter: DatasetFilter, - sourceConfiguration: ModuleSourceConfiguration, - partitionField?: string + setUpModuleArgs: SetUpModuleArgs, + fetch: HttpHandler ) => Promise; - cleanUpModule: (spaceId: string, sourceId: string) => Promise; + cleanUpModule: ( + spaceId: string, + sourceId: string, + fetch: HttpHandler + ) => Promise; validateSetupIndices?: ( indices: string[], - timestampField: string + timestampField: string, + fetch: HttpHandler ) => Promise; validateSetupDatasets?: ( indices: string[], timestampField: string, startTime: number, - endTime: number + endTime: number, + fetch: HttpHandler ) => Promise; } diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts index 7ea87c3d21322..47230cbed977f 100644 --- a/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_hosts/module_descriptor.ts @@ -5,7 +5,8 @@ */ import { i18n } from '@kbn/i18n'; -import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types'; +import { HttpHandler } from 'src/core/public'; +import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types'; import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup'; import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api'; import { callGetMlModuleAPI } from '../../api/ml_get_module'; @@ -14,7 +15,6 @@ import { metricsHostsJobTypes, getJobId, MetricsHostsJobType, - DatasetFilter, bucketSpan, } from '../../../../../common/infra_ml'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths @@ -48,24 +48,28 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, metricsHostsJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: metricsHostsJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return 
await callGetMlModuleAPI(moduleId, fetch); }; -const setUpModule = async ( - start: number | undefined, - end: number | undefined, - datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, - partitionField?: string -) => { +const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => { + const { + start, + end, + moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField }, + partitionField, + } = setUpModuleArgs; + const indexNamePattern = indices.join(','); const jobIds: JobType[] = ['hosts_memory_usage', 'hosts_network_in', 'hosts_network_out']; @@ -128,14 +132,17 @@ const setUpModule = async ( }); return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - datafeedOverrides + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + datafeedOverrides, + }, + fetch ); }; @@ -159,8 +166,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => { } }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsHostsJobTypes, fetch); }; export const metricHostsModule: ModuleDescriptor = { diff --git a/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts index eaf7489c84eb4..488803dc113b0 100644 --- a/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts +++ b/x-pack/plugins/infra/public/containers/ml/modules/metrics_k8s/module_descriptor.ts @@ -5,7 +5,8 @@ */ import { i18n } from '@kbn/i18n'; -import { ModuleDescriptor, ModuleSourceConfiguration } from '../../infra_ml_module_types'; +import { HttpHandler } from 'src/core/public'; +import { ModuleDescriptor, SetUpModuleArgs } from '../../infra_ml_module_types'; import { cleanUpJobsAndDatafeeds } from '../../infra_ml_cleanup'; import { callJobsSummaryAPI } from '../../api/ml_get_jobs_summary_api'; import { callGetMlModuleAPI } from '../../api/ml_get_module'; @@ -14,7 +15,6 @@ import { metricsK8SJobTypes, getJobId, MetricK8sJobType, - DatasetFilter, bucketSpan, } from '../../../../../common/infra_ml'; // eslint-disable-next-line @kbn/eslint/no-restricted-paths @@ -49,24 +49,28 @@ const getJobIds = (spaceId: string, sourceId: string) => {} as Record ); -const getJobSummary = async (spaceId: string, sourceId: string) => { - const response = await callJobsSummaryAPI(spaceId, sourceId, metricsK8SJobTypes); +const getJobSummary = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + const response = await callJobsSummaryAPI( + { spaceId, sourceId, jobTypes: metricsK8SJobTypes }, + fetch + ); const jobIds = Object.values(getJobIds(spaceId, sourceId)); return response.filter((jobSummary) => jobIds.includes(jobSummary.id)); }; -const getModuleDefinition = async () => { - return await callGetMlModuleAPI(moduleId); +const getModuleDefinition = async (fetch: HttpHandler) => { + return await callGetMlModuleAPI(moduleId, fetch); }; -const setUpModule = async ( - start: number | undefined, - end: number | undefined, - datasetFilter: DatasetFilter, - { spaceId, sourceId, indices, timestampField }: ModuleSourceConfiguration, - partitionField?: 
string -) => { +const setUpModule = async (setUpModuleArgs: SetUpModuleArgs, fetch: HttpHandler) => { + const { + start, + end, + moduleSourceConfiguration: { spaceId, sourceId, indices, timestampField }, + partitionField, + } = setUpModuleArgs; + const indexNamePattern = indices.join(','); const jobIds: JobType[] = ['k8s_memory_usage', 'k8s_network_in', 'k8s_network_out']; const jobOverrides = jobIds.map((id) => { @@ -133,14 +137,17 @@ const setUpModule = async ( }); return callSetupMlModuleAPI( - moduleId, - start, - end, - spaceId, - sourceId, - indexNamePattern, - jobOverrides, - datafeedOverrides + { + moduleId, + start, + end, + spaceId, + sourceId, + indexPattern: indexNamePattern, + jobOverrides, + datafeedOverrides, + }, + fetch ); }; @@ -164,8 +171,8 @@ const getDefaultJobConfigs = (jobId: JobType): { datafeed: any; job: any } => { } }; -const cleanUpModule = async (spaceId: string, sourceId: string) => { - return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes); +const cleanUpModule = async (spaceId: string, sourceId: string, fetch: HttpHandler) => { + return await cleanUpJobsAndDatafeeds(spaceId, sourceId, metricsK8SJobTypes, fetch); }; export const metricHostsModule: ModuleDescriptor = { diff --git a/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx b/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx index 945b299674aaa..4f83e37d7e029 100644 --- a/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx +++ b/x-pack/plugins/infra/public/pages/link_to/link_to_logs.test.tsx @@ -14,7 +14,6 @@ import { createMemoryHistory } from 'history'; import React from 'react'; import { Route, Router, Switch } from 'react-router-dom'; import { httpServiceMock } from 'src/core/public/mocks'; -// import { HttpSetup } from 'src/core/public'; import { KibanaContextProvider } from 'src/plugins/kibana_react/public'; import { useLogSource } from '../../containers/logs/log_source'; import { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts index a8cd7854efb6b..5f34d45635b60 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_datasets.ts @@ -4,24 +4,28 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryCategoryDatasetsRequestPayloadRT, getLogEntryCategoryDatasetsSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; +} export const callGetLogEntryCategoryDatasetsAPI = async ( - sourceId: string, - startTime: number, - endTime: number + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, { + const { sourceId, startTime, endTime } = requestArgs; + + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_DATASETS_PATH, { method: 'POST', body: JSON.stringify( getLogEntryCategoryDatasetsRequestPayloadRT.encode({ @@ -36,8 +40,5 @@ export const callGetLogEntryCategoryDatasetsAPI = async ( ), }); - return pipe( - getLogEntryCategoryDatasetsSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryCategoryDatasetsSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts index a10d077a2dd4f..c4b756ebf5d58 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_log_entry_category_examples.ts @@ -4,26 +4,30 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryCategoryExamplesRequestPayloadRT, getLogEntryCategoryExamplesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + categoryId: number; + exampleCount: number; +} export const callGetLogEntryCategoryExamplesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - categoryId: number, - exampleCount: number + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, { + const { sourceId, startTime, endTime, categoryId, exampleCount } = requestArgs; + + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORY_EXAMPLES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryCategoryExamplesRequestPayloadRT.encode({ @@ -40,8 +44,5 @@ export const callGetLogEntryCategoryExamplesAPI = async ( ), }); - return pipe( - getLogEntryCategoryExamplesSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryCategoryExamplesSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts index 2ebcff4fd3ca5..fd53803796339 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/service_calls/get_top_log_entry_categories.ts @@ -4,28 +4,31 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryCategoriesRequestPayloadRT, getLogEntryCategoriesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; + +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + categoryCount: number; + datasets?: string[]; +} export const callGetTopLogEntryCategoriesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - categoryCount: number, - datasets?: string[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { + const { sourceId, startTime, endTime, categoryCount, datasets } = requestArgs; const intervalDuration = endTime - startTime; - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, { + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_CATEGORIES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryCategoriesRequestPayloadRT.encode({ @@ -60,8 +63,5 @@ export const callGetTopLogEntryCategoriesAPI = async ( ), }); - return pipe( - getLogEntryCategoriesSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryCategoriesSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts index 123b188046b85..0a12c433db60a 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_categories_results.ts @@ -13,6 +13,7 @@ import { import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise'; import { callGetTopLogEntryCategoriesAPI } from './service_calls/get_top_log_entry_categories'; import { callGetLogEntryCategoryDatasetsAPI } from './service_calls/get_log_entry_category_datasets'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; type TopLogEntryCategories = GetLogEntryCategoriesSuccessResponsePayload['data']['categories']; type LogEntryCategoryDatasets = GetLogEntryCategoryDatasetsSuccessResponsePayload['data']['datasets']; @@ -34,6 +35,7 @@ export const useLogEntryCategoriesResults = ({ sourceId: string; startTime: number; }) => { + const { services } = useKibanaContextForPlugin(); const [topLogEntryCategories, setTopLogEntryCategories] = useState([]); const [logEntryCategoryDatasets, setLogEntryCategoryDatasets] = useState< LogEntryCategoryDatasets @@ -44,11 +46,14 @@ export const useLogEntryCategoriesResults = ({ cancelPreviousOn: 'creation', createPromise: async () => { return await callGetTopLogEntryCategoriesAPI( - sourceId, - startTime, - endTime, - categoriesCount, - filteredDatasets + { + sourceId, + startTime, + endTime, + categoryCount: categoriesCount, + datasets: filteredDatasets, + }, + services.http.fetch ); }, onResolve: ({ data: { categories } }) => { @@ -71,7 +76,10 @@ export const useLogEntryCategoriesResults = ({ { cancelPreviousOn: 'creation', createPromise: async () => 
{ - return await callGetLogEntryCategoryDatasetsAPI(sourceId, startTime, endTime); + return await callGetLogEntryCategoryDatasetsAPI( + { sourceId, startTime, endTime }, + services.http.fetch + ); }, onResolve: ({ data: { datasets } }) => { setLogEntryCategoryDatasets(datasets); diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx index cdf3b642a8012..84b9f045288cc 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/use_log_entry_category_examples.tsx @@ -7,6 +7,7 @@ import { useMemo, useState } from 'react'; import { LogEntryCategoryExample } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { callGetLogEntryCategoryExamplesAPI } from './service_calls/get_log_entry_category_examples'; @@ -23,6 +24,8 @@ export const useLogEntryCategoryExamples = ({ sourceId: string; startTime: number; }) => { + const { services } = useKibanaContextForPlugin(); + const [logEntryCategoryExamples, setLogEntryCategoryExamples] = useState< LogEntryCategoryExample[] >([]); @@ -32,11 +35,14 @@ export const useLogEntryCategoryExamples = ({ cancelPreviousOn: 'creation', createPromise: async () => { return await callGetLogEntryCategoryExamplesAPI( - sourceId, - startTime, - endTime, - categoryId, - exampleCount + { + sourceId, + startTime, + endTime, + categoryId, + exampleCount, + }, + services.http.fetch ); }, onResolve: ({ data: { examples } }) => { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts index 21696df566ed9..7f90604bfefdd 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryAnomaliesRequestPayloadRT, getLogEntryAnomaliesSuccessReponsePayloadRT, @@ -13,15 +13,18 @@ import { import { decodeOrThrow } from '../../../../../common/runtime_types'; import { Sort, Pagination } from '../../../../../common/http_api/log_analysis'; -export const callGetLogEntryAnomaliesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - sort: Sort, - pagination: Pagination, - datasets?: string[] -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, { +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + sort: Sort; + pagination: Pagination; + datasets?: string[]; +} + +export const callGetLogEntryAnomaliesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { sourceId, startTime, endTime, sort, pagination, datasets } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryAnomaliesRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts index 24be5a646d103..c62bec691590c 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { decodeOrThrow } from '../../../../../common/runtime_types'; import { getLogEntryAnomaliesDatasetsRequestPayloadRT, @@ -12,12 +12,18 @@ import { LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, } from '../../../../../common/http_api/log_analysis'; +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; +} + export const callGetLogEntryAnomaliesDatasetsAPI = async ( - sourceId: string, - startTime: number, - endTime: number + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, { + const { sourceId, startTime, endTime } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, { method: 'POST', body: JSON.stringify( getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts index a125b53f9e635..ab724a2f435b2 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts @@ -4,27 +4,27 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryExamplesRequestPayloadRT, getLogEntryExamplesSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callGetLogEntryExamplesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - dataset: string, - exampleCount: number, - categoryId?: string -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, { +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + dataset: string; + exampleCount: number; + categoryId?: string; +} + +export const callGetLogEntryExamplesAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { sourceId, startTime, endTime, dataset, exampleCount, categoryId } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, { method: 'POST', body: JSON.stringify( getLogEntryExamplesRequestPayloadRT.encode({ @@ -42,8 +42,5 @@ export const callGetLogEntryExamplesAPI = async ( ), }); - return pipe( - getLogEntryExamplesSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryExamplesSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts index 77111d279309d..c9189bd803955 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts @@ -4,25 +4,25 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { npStart } from '../../../../legacy_singletons'; +import type { HttpHandler } from 'src/core/public'; import { getLogEntryRateRequestPayloadRT, getLogEntryRateSuccessReponsePayloadRT, LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, } from '../../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; -export const callGetLogEntryRateAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - bucketDuration: number, - datasets?: string[] -) => { - const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, { +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + bucketDuration: number; + datasets?: string[]; +} + +export const callGetLogEntryRateAPI = async (requestArgs: RequestArgs, fetch: HttpHandler) => { + const { sourceId, startTime, endTime, bucketDuration, datasets } = requestArgs; + const response = await fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, { method: 'POST', body: JSON.stringify( getLogEntryRateRequestPayloadRT.encode({ @@ -38,8 +38,5 @@ export const callGetLogEntryRateAPI = async ( }) ), }); - return pipe( - getLogEntryRateSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); + return decodeOrThrow(getLogEntryRateSuccessReponsePayloadRT)(response); }; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts index 52632e54390a9..37c99272f0872 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts @@ -16,6 +16,7 @@ import { GetLogEntryAnomaliesDatasetsSuccessResponsePayload, LogEntryAnomaly, } from '../../../../common/http_api/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -161,6 +162,8 @@ export const useLogEntryAnomaliesResults = ({ }; }; + const { services } = useKibanaContextForPlugin(); + const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer); const [logEntryAnomalies, setLogEntryAnomalies] = useState([]); @@ -177,15 +180,18 @@ export const useLogEntryAnomaliesResults = ({ filteredDatasets: queryFilteredDatasets, } = reducerState; return await callGetLogEntryAnomaliesAPI( - sourceId, - queryStartTime, - queryEndTime, - sortOptions, { - ...paginationOptions, - cursor: paginationCursor, + sourceId, + startTime: queryStartTime, + endTime: queryEndTime, + sort: sortOptions, + pagination: { + ...paginationOptions, + cursor: paginationCursor, + }, + datasets: queryFilteredDatasets, }, - queryFilteredDatasets + services.http.fetch ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -286,7 +292,10 @@ export const useLogEntryAnomaliesResults = ({ { cancelPreviousOn: 'creation', createPromise: async () => { - return await callGetLogEntryAnomaliesDatasetsAPI(sourceId, startTime, endTime); + return await callGetLogEntryAnomaliesDatasetsAPI( + { sourceId, startTime, endTime }, + services.http.fetch + ); }, onResolve: ({ data: { datasets 
} }) => { setLogEntryAnomaliesDatasets(datasets); diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts index fae5bd200a415..e809ab9cd5a6f 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts @@ -7,6 +7,7 @@ import { useMemo, useState } from 'react'; import { LogEntryExample } from '../../../../common/http_api'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples'; @@ -25,6 +26,7 @@ export const useLogEntryExamples = ({ startTime: number; categoryId?: string; }) => { + const { services } = useKibanaContextForPlugin(); const [logEntryExamples, setLogEntryExamples] = useState([]); const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise( @@ -32,12 +34,15 @@ export const useLogEntryExamples = ({ cancelPreviousOn: 'creation', createPromise: async () => { return await callGetLogEntryExamplesAPI( - sourceId, - startTime, - endTime, - dataset, - exampleCount, - categoryId + { + sourceId, + startTime, + endTime, + dataset, + exampleCount, + categoryId, + }, + services.http.fetch ); }, onResolve: ({ data: { examples } }) => { diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts index a52dab58cb018..aef94afa505f1 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts @@ -12,6 +12,7 @@ import { LogEntryRatePartition, LogEntryRateAnomaly, } from '../../../../common/http_api/log_analysis'; +import { useKibanaContextForPlugin } from '../../../hooks/use_kibana'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; import { callGetLogEntryRateAPI } from './service_calls/get_log_entry_rate'; @@ -49,6 +50,7 @@ export const useLogEntryRateResults = ({ bucketDuration: number; filteredDatasets?: string[]; }) => { + const { services } = useKibanaContextForPlugin(); const [logEntryRate, setLogEntryRate] = useState(null); const [getLogEntryRateRequest, getLogEntryRate] = useTrackedPromise( @@ -56,11 +58,14 @@ export const useLogEntryRateResults = ({ cancelPreviousOn: 'resolution', createPromise: async () => { return await callGetLogEntryRateAPI( - sourceId, - startTime, - endTime, - bucketDuration, - filteredDatasets + { + sourceId, + startTime, + endTime, + bucketDuration, + datasets: filteredDatasets, + }, + services.http.fetch ); }, onResolve: ({ data }) => { diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts index f33e3ea16b389..02170f41a32ca 100644 --- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_hosts_anomalies.ts @@ -5,6 +5,7 @@ */ import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; +import { HttpHandler } from 'src/core/public'; import { INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, Metric, @@ 
-16,8 +17,8 @@ import { getMetricsHostsAnomaliesSuccessReponsePayloadRT, } from '../../../../../common/http_api/infra_ml'; import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; -import { npStart } from '../../../../legacy_singletons'; import { decodeOrThrow } from '../../../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -149,6 +150,7 @@ export const useMetricsHostsAnomaliesResults = ({ onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; filteredDatasets?: string[]; }) => { + const { services } = useKibanaContextForPlugin(); const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { return { ...stateDefaults, @@ -177,15 +179,18 @@ export const useMetricsHostsAnomaliesResults = ({ paginationCursor, } = reducerState; return await callGetMetricHostsAnomaliesAPI( - sourceId, - queryStartTime, - queryEndTime, - metric, - sortOptions, { - ...paginationOptions, - cursor: paginationCursor, - } + sourceId, + startTime: queryStartTime, + endTime: queryEndTime, + metric, + sort: sortOptions, + pagination: { + ...paginationOptions, + cursor: paginationCursor, + }, + }, + services.http.fetch ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -288,15 +293,21 @@ export const useMetricsHostsAnomaliesResults = ({ }; }; +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + metric: Metric; + sort: Sort; + pagination: Pagination; +} + export const callGetMetricHostsAnomaliesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - metric: Metric, - sort: Sort, - pagination: Pagination + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, { + const { sourceId, startTime, endTime, metric, sort, pagination } = requestArgs; + const response = await fetch(INFA_ML_GET_METRICS_HOSTS_ANOMALIES_PATH, { method: 'POST', body: JSON.stringify( getMetricsHostsAnomaliesRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts index 89e70c4c5c4c7..951951b9b6106 100644 --- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts +++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/hooks/use_metrics_k8s_anomalies.ts @@ -5,6 +5,7 @@ */ import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; +import { HttpHandler } from 'src/core/public'; import { Sort, Pagination, @@ -16,8 +17,8 @@ import { Metric, } from '../../../../../common/http_api/infra_ml'; import { useTrackedPromise } from '../../../../utils/use_tracked_promise'; -import { npStart } from '../../../../legacy_singletons'; import { decodeOrThrow } from '../../../../../common/runtime_types'; +import { useKibanaContextForPlugin } from '../../../../hooks/use_kibana'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -149,6 +150,7 @@ export const useMetricsK8sAnomaliesResults = ({ onGetMetricsHostsAnomaliesDatasetsError?: (error: Error) => void; filteredDatasets?: string[]; }) => { + const { services } = useKibanaContextForPlugin(); const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { return { ...stateDefaults, @@ -178,16 
+180,19 @@ export const useMetricsK8sAnomaliesResults = ({ filteredDatasets: queryFilteredDatasets, } = reducerState; return await callGetMetricsK8sAnomaliesAPI( - sourceId, - queryStartTime, - queryEndTime, - metric, - sortOptions, { - ...paginationOptions, - cursor: paginationCursor, + sourceId, + startTime: queryStartTime, + endTime: queryEndTime, + metric, + sort: sortOptions, + pagination: { + ...paginationOptions, + cursor: paginationCursor, + }, + datasets: queryFilteredDatasets, }, - queryFilteredDatasets + services.http.fetch ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -290,16 +295,22 @@ export const useMetricsK8sAnomaliesResults = ({ }; }; +interface RequestArgs { + sourceId: string; + startTime: number; + endTime: number; + metric: Metric; + sort: Sort; + pagination: Pagination; + datasets?: string[]; +} + export const callGetMetricsK8sAnomaliesAPI = async ( - sourceId: string, - startTime: number, - endTime: number, - metric: Metric, - sort: Sort, - pagination: Pagination, - datasets?: string[] + requestArgs: RequestArgs, + fetch: HttpHandler ) => { - const response = await npStart.http.fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, { + const { sourceId, startTime, endTime, metric, sort, pagination, datasets } = requestArgs; + const response = await fetch(INFA_ML_GET_METRICS_K8S_ANOMALIES_PATH, { method: 'POST', body: JSON.stringify( getMetricsK8sAnomaliesRequestPayloadRT.encode({ diff --git a/x-pack/plugins/infra/public/plugin.ts b/x-pack/plugins/infra/public/plugin.ts index 3c6b1a14cfd47..0e49ca93010fd 100644 --- a/x-pack/plugins/infra/public/plugin.ts +++ b/x-pack/plugins/infra/public/plugin.ts @@ -9,7 +9,6 @@ import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/public'; import { createMetricThresholdAlertType } from './alerting/metric_threshold'; import { createInventoryMetricAlertType } from './alerting/inventory'; import { getAlertType as getLogsAlertType } from './alerting/log_threshold'; -import { registerStartSingleton } from './legacy_singletons'; import { registerFeatures } from './register_feature'; import { InfraClientSetupDeps, @@ -98,9 +97,7 @@ export class Plugin implements InfraClientPluginClass { }); } - start(core: InfraClientCoreStart, _plugins: InfraClientStartDeps) { - registerStartSingleton(core); - } + start(_core: InfraClientCoreStart, _plugins: InfraClientStartDeps) {} stop() {} } diff --git a/x-pack/plugins/ingest_manager/common/constants/agent.ts b/x-pack/plugins/ingest_manager/common/constants/agent.ts index 82d2ad712ef02..30b8a6b740609 100644 --- a/x-pack/plugins/ingest_manager/common/constants/agent.ts +++ b/x-pack/plugins/ingest_manager/common/constants/agent.ts @@ -13,10 +13,12 @@ export const AGENT_TYPE_EPHEMERAL = 'EPHEMERAL'; export const AGENT_TYPE_TEMPORARY = 'TEMPORARY'; export const AGENT_POLLING_REQUEST_TIMEOUT_MS = 300000; // 5 minutes +export const AGENT_POLLING_REQUEST_TIMEOUT_MARGIN_MS = 20000; // 20s + export const AGENT_POLLING_THRESHOLD_MS = 30000; export const AGENT_POLLING_INTERVAL = 1000; export const AGENT_UPDATE_LAST_CHECKIN_INTERVAL_MS = 30000; export const AGENT_UPDATE_ACTIONS_INTERVAL_MS = 5000; -export const AGENT_POLICY_ROLLOUT_RATE_LIMIT_INTERVAL_MS = 5000; -export const AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL = 25; +export const AGENT_POLICY_ROLLOUT_RATE_LIMIT_INTERVAL_MS = 1000; +export const AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL = 5; diff --git a/x-pack/plugins/ingest_manager/server/constants/index.ts 
b/x-pack/plugins/ingest_manager/server/constants/index.ts index 3965e27da0542..c69ee7e4b6092 100644 --- a/x-pack/plugins/ingest_manager/server/constants/index.ts +++ b/x-pack/plugins/ingest_manager/server/constants/index.ts @@ -8,6 +8,7 @@ export { AGENT_TYPE_EPHEMERAL, AGENT_TYPE_TEMPORARY, AGENT_POLLING_THRESHOLD_MS, + AGENT_POLLING_REQUEST_TIMEOUT_MARGIN_MS, AGENT_POLLING_INTERVAL, AGENT_UPDATE_LAST_CHECKIN_INTERVAL_MS, AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL, diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts index 5e84e3a50bb44..2909899418ec2 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.test.ts @@ -15,9 +15,13 @@ describe('createRateLimiter', () => { scheduler.run(({ expectObservable, cold }) => { const source = cold('a-b-c-d-e-f|'); - const rateLimiter = createRateLimiter(10, 1, 2, scheduler); + const intervalMs = 10; + const perInterval = 1; + const maxDelayMs = 50; + const rateLimiter = createRateLimiter(intervalMs, perInterval, maxDelayMs, scheduler); const obs = source.pipe(rateLimiter()); - const results = 'a 9ms b 9ms c 9ms d 9ms e 9ms (f|)'; + // f should be dropped because of maxDelay + const results = 'a 9ms b 9ms c 9ms d 9ms (e|)'; expectObservable(obs).toBe(results); }); }); diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts index 3bbfbbd4ec1bf..bbdaa9975eeac 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/rxjs_utils.ts @@ -54,6 +54,8 @@ export function createRateLimiter( let countInCurrentInterval = 0; function createRateLimitOperator(): Rx.OperatorFunction { + const maxIntervalEnd = scheduler.now() + maxDelay; + return Rx.pipe( concatMap(function rateLimit(value: T) { const now = scheduler.now(); @@ -61,9 +63,9 @@ export function createRateLimiter( countInCurrentInterval = 1; intervalEnd = now + ratelimitIntervalMs; return Rx.of(value); - } else if (intervalEnd >= now + maxDelay) { - // re-rate limit in the future to avoid to schedule too far in the future as some observer can unsubscribe - return Rx.of(value).pipe(delay(maxDelay, scheduler), createRateLimitOperator()); + } else if (intervalEnd >= maxIntervalEnd) { + // drop the value as it's never going to success as long polling timeout is going to happen before we can send the policy + return Rx.EMPTY; } else { if (++countInCurrentInterval > ratelimitRequestPerInterval) { countInCurrentInterval = 1; diff --git a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts index 51ccdc8eb1c7c..8ae151577fefa 100644 --- a/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts +++ b/x-pack/plugins/ingest_manager/server/services/agents/checkin/state_new_actions.ts @@ -27,6 +27,7 @@ import * as APIKeysService from '../../api_keys'; import { AGENT_SAVED_OBJECT_TYPE, AGENT_UPDATE_ACTIONS_INTERVAL_MS, + AGENT_POLLING_REQUEST_TIMEOUT_MARGIN_MS, AGENT_POLICY_ROLLOUT_RATE_LIMIT_INTERVAL_MS, AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL, } from '../../../constants'; @@ -38,8 +39,6 @@ import { import { appContextService 
} from '../../app_context'; import { toPromiseAbortable, AbortError, createRateLimiter } from './rxjs_utils'; -const RATE_LIMIT_MAX_DELAY_MS = 5 * 60 * 1000; // 5 minutes - function getInternalUserSOClient() { const fakeRequest = ({ headers: {}, @@ -166,19 +165,29 @@ export async function createAgentActionFromPolicyAction( return [newAgentAction]; } +function getPollingTimeoutMs() { + const pollingTimeoutMs = appContextService.getConfig()?.fleet.pollingRequestTimeout ?? 0; + // Set a timeout 20s before the real timeout to have a chance to respond with an empty response before the socket timeout + return Math.max( + pollingTimeoutMs - AGENT_POLLING_REQUEST_TIMEOUT_MARGIN_MS, + AGENT_POLLING_REQUEST_TIMEOUT_MARGIN_MS + ); +} + export function agentCheckinStateNewActionsFactory() { // Shared Observables const agentPolicies$ = new Map>(); const newActions$ = createNewActionsSharedObservable(); // Rx operators - const pollingTimeoutMs = appContextService.getConfig()?.fleet.pollingRequestTimeout ?? 0; + const pollingTimeoutMs = getPollingTimeoutMs(); + const rateLimiterIntervalMs = appContextService.getConfig()?.fleet.agentPolicyRolloutRateLimitIntervalMs ?? AGENT_POLICY_ROLLOUT_RATE_LIMIT_INTERVAL_MS; const rateLimiterRequestPerInterval = appContextService.getConfig()?.fleet.agentPolicyRolloutRateLimitRequestPerInterval ?? AGENT_POLICY_ROLLOUT_RATE_LIMIT_REQUEST_PER_INTERVAL; - const rateLimiterMaxDelay = Math.min(RATE_LIMIT_MAX_DELAY_MS, pollingTimeoutMs); + const rateLimiterMaxDelay = pollingTimeoutMs; const rateLimiter = createRateLimiter( rateLimiterIntervalMs, @@ -204,10 +213,7 @@ export function agentCheckinStateNewActionsFactory() { } const stream$ = agentPolicy$.pipe( - timeout( - // Set a timeout 3s before the real timeout to have a chance to respond an empty response before socket timeout - Math.max(pollingTimeoutMs - 3000, 3000) - ), + timeout(pollingTimeoutMs), filter( (action) => agent.policy_id !== undefined && diff --git a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.test.ts b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.test.ts index 99e568bf771f8..cc1aa79c7491c 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.test.ts @@ -212,6 +212,37 @@ test('tests processing keyword field with multi fields with analyzed text field' expect(mappings).toEqual(keywordWithAnalyzedMultiFieldsMapping); }); +test('tests processing keyword field with multi fields with normalized keyword field', () => { + const keywordWithNormalizedMultiFieldsLiteralYml = ` + - name: keywordWithNormalizedMultiField + type: keyword + multi_fields: + - name: normalized + type: keyword + normalizer: lowercase + `; + + const keywordWithNormalizedMultiFieldsMapping = { + properties: { + keywordWithNormalizedMultiField: { + ignore_above: 1024, + type: 'keyword', + fields: { + normalized: { + type: 'keyword', + ignore_above: 1024, + normalizer: 'lowercase', + }, + }, + }, + }, + }; + const fields: Field[] = safeLoad(keywordWithNormalizedMultiFieldsLiteralYml); + const processedFields = processFields(fields); + const mappings = generateMappings(processedFields); + expect(mappings).toEqual(keywordWithNormalizedMultiFieldsMapping); +}); + test('tests processing object field with no other attributes', () => { const objectFieldLiteralYml = ` - name: objectField diff --git
a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.ts b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.ts index 00c2e873ba129..e0fea59107c26 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/template.ts @@ -189,6 +189,9 @@ function generateKeywordMapping(field: Field): IndexTemplateMapping { if (field.ignore_above) { mapping.ignore_above = field.ignore_above; } + if (field.normalizer) { + mapping.normalizer = field.normalizer; + } return mapping; } diff --git a/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts b/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts index a44e5e4221f9f..5913302e77ba6 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts @@ -20,6 +20,7 @@ export interface Field { index?: boolean; required?: boolean; multi_fields?: Fields; + normalizer?: string; doc_values?: boolean; copy_to?: string; analyzer?: string; diff --git a/x-pack/plugins/lens/public/app_plugin/lens_top_nav.tsx b/x-pack/plugins/lens/public/app_plugin/lens_top_nav.tsx index f6234d063d8cd..9162af52052ee 100644 --- a/x-pack/plugins/lens/public/app_plugin/lens_top_nav.tsx +++ b/x-pack/plugins/lens/public/app_plugin/lens_top_nav.tsx @@ -30,24 +30,22 @@ export function getLensTopNavConfig(options: { defaultMessage: 'Save', }); - if (showSaveAndReturn) { + if (showCancel) { topNavMenu.push({ - label: i18n.translate('xpack.lens.app.saveAndReturn', { - defaultMessage: 'Save and return', + label: i18n.translate('xpack.lens.app.cancel', { + defaultMessage: 'cancel', }), - emphasize: true, - iconType: 'check', - run: actions.saveAndReturn, - testId: 'lnsApp_saveAndReturnButton', - disableButton: !savingPermitted, - description: i18n.translate('xpack.lens.app.saveAndReturnButtonAriaLabel', { - defaultMessage: 'Save the current lens visualization and return to the last app', + run: actions.cancel, + testId: 'lnsApp_cancelButton', + description: i18n.translate('xpack.lens.app.cancelButtonAriaLabel', { + defaultMessage: 'Return to the last app without saving changes', }), }); } topNavMenu.push({ label: saveButtonLabel, + iconType: !showSaveAndReturn ? 
'save' : undefined, emphasize: !showSaveAndReturn, run: actions.showSaveModal, testId: 'lnsApp_saveButton', @@ -57,17 +55,21 @@ export function getLensTopNavConfig(options: { disableButton: !savingPermitted, }); - if (showCancel) { + if (showSaveAndReturn) { topNavMenu.push({ - label: i18n.translate('xpack.lens.app.cancel', { - defaultMessage: 'cancel', + label: i18n.translate('xpack.lens.app.saveAndReturn', { + defaultMessage: 'Save and return', }), - run: actions.cancel, - testId: 'lnsApp_cancelButton', - description: i18n.translate('xpack.lens.app.cancelButtonAriaLabel', { - defaultMessage: 'Return to the last app without saving changes', + emphasize: true, + iconType: 'checkInCircleFilled', + run: actions.saveAndReturn, + testId: 'lnsApp_saveAndReturnButton', + disableButton: !savingPermitted, + description: i18n.translate('xpack.lens.app.saveAndReturnButtonAriaLabel', { + defaultMessage: 'Save the current lens visualization and return to the last app', }), }); } + return topNavMenu; } diff --git a/x-pack/plugins/lens/public/async_services.ts b/x-pack/plugins/lens/public/async_services.ts index 5a88b47c0e894..09b9233197d2f 100644 --- a/x-pack/plugins/lens/public/async_services.ts +++ b/x-pack/plugins/lens/public/async_services.ts @@ -21,4 +21,5 @@ export * from './xy_visualization/xy_visualization'; export * from './indexpattern_datasource/indexpattern'; export * from './editor_frame_service/editor_frame'; +export * from './editor_frame_service/embeddable'; export * from './app_plugin/mounter'; diff --git a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/index.ts b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/index.ts index 41558caafc64c..04d4bc9c25de5 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/editor_frame/index.ts +++ b/x-pack/plugins/lens/public/editor_frame_service/editor_frame/index.ts @@ -5,3 +5,5 @@ */ export * from './editor_frame'; +export * from './state_helpers'; +export * from './state_management'; diff --git a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx index d48f9ed713caf..151f85e817c70 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.test.tsx @@ -26,7 +26,6 @@ import { VIS_EVENT_TO_TRIGGER } from '../../../../../../src/plugins/visualizatio import { coreMock, httpServiceMock } from '../../../../../../src/core/public/mocks'; import { IBasePath } from '../../../../../../src/core/public'; import { AttributeService } from '../../../../../../src/plugins/dashboard/public'; -import { Ast } from '@kbn/interpreter/common'; import { LensAttributeService } from '../../lens_attribute_service'; jest.mock('../../../../../../src/plugins/inspector/public/', () => ({ @@ -103,8 +102,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, {} as LensEmbeddableInput ); @@ -112,7 +117,8 @@ describe('embeddable', () => { embeddable.render(mountpoint); expect(expressionRenderer).toHaveBeenCalledTimes(1); - 
expect(expressionRenderer.mock.calls[0][0]!.expression).toEqual('my | expression'); + expect(expressionRenderer.mock.calls[0][0]!.expression).toEqual(`my +| expression`); }); it('should re-render if new input is pushed', async () => { @@ -129,8 +135,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, { id: '123' } as LensEmbeddableInput ); @@ -162,8 +174,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, input ); @@ -208,8 +226,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, input ); @@ -237,8 +261,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, { id: '123' } as LensEmbeddableInput ); @@ -270,8 +300,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, { id: '123', timeRange, query, filters } as LensEmbeddableInput ); @@ -311,8 +347,14 @@ describe('embeddable', () => { indexPatternService: {} as IndexPatternsContract, editable: true, getTrigger, - documentToExpression: () => Promise.resolve({} as Ast), - toExpressionString: () => 'my | expression', + documentToExpression: () => + Promise.resolve({ + type: 'expression', + chain: [ + { type: 'function', function: 'my', arguments: {} }, + { type: 'function', function: 'expression', arguments: {} }, + ], + }), }, { id: '123', timeRange, query, filters } as LensEmbeddableInput ); diff --git a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx index 16b19ca0af849..1297c1da6e1b6 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable.tsx @@ -18,7 +18,7 @@ 
import { import { ExecutionContextSearch } from 'src/plugins/expressions'; import { Subscription } from 'rxjs'; -import { Ast } from '@kbn/interpreter/common'; +import { toExpression, Ast } from '@kbn/interpreter/common'; import { ExpressionRendererEvent, ReactExpressionRendererType, @@ -59,7 +59,6 @@ export interface LensEmbeddableOutput extends EmbeddableOutput { export interface LensEmbeddableDeps { attributeService: LensAttributeService; documentToExpression: (doc: Document) => Promise; - toExpressionString: (astObj: Ast, type?: string) => string; editable: boolean; indexPatternService: IndexPatternsContract; expressionRenderer: ReactExpressionRendererType; @@ -135,7 +134,7 @@ export class Embeddable savedObjectId: (input as LensByReferenceInput)?.savedObjectId, }; const expression = await this.deps.documentToExpression(this.savedVis); - this.expression = expression ? this.deps.toExpressionString(expression) : null; + this.expression = expression ? toExpression(expression) : null; await this.initializeOutput(); this.isInitialized = true; if (this.domNode) { diff --git a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable_factory.ts b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable_factory.ts index 8771d1ebaddb1..35d120e5c4f45 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable_factory.ts +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/embeddable_factory.ts @@ -7,7 +7,7 @@ import { Capabilities, HttpSetup } from 'kibana/public'; import { i18n } from '@kbn/i18n'; import { RecursiveReadonly } from '@kbn/utility-types'; -import { toExpression, Ast } from '@kbn/interpreter/target/common'; +import { Ast } from '@kbn/interpreter/target/common'; import { IndexPatternsContract, TimefilterContract, @@ -17,7 +17,7 @@ import { EmbeddableFactoryDefinition, IContainer, } from '../../../../../../src/plugins/embeddable/public'; -import { Embeddable, LensByReferenceInput, LensEmbeddableInput } from './embeddable'; +import { LensByReferenceInput, LensEmbeddableInput } from './embeddable'; import { DOC_TYPE } from '../../persistence'; import { UiActionsStart } from '../../../../../../src/plugins/ui_actions/public'; import { Document } from '../../persistence/saved_object_store'; @@ -83,6 +83,8 @@ export class EmbeddableFactory implements EmbeddableFactoryDefinition { indexPatternService, } = await this.getStartServices(); + const { Embeddable } = await import('../../async_services'); + return new Embeddable( { attributeService, @@ -93,7 +95,6 @@ export class EmbeddableFactory implements EmbeddableFactoryDefinition { basePath: coreHttp.basePath, getTrigger: uiActions?.getTrigger, documentToExpression, - toExpressionString: toExpression, }, input, parent diff --git a/x-pack/plugins/infra/public/legacy_singletons.ts b/x-pack/plugins/lens/public/editor_frame_service/embeddable/index.ts similarity index 58% rename from x-pack/plugins/infra/public/legacy_singletons.ts rename to x-pack/plugins/lens/public/editor_frame_service/embeddable/index.ts index f57047f21c281..460341365094e 100644 --- a/x-pack/plugins/infra/public/legacy_singletons.ts +++ b/x-pack/plugins/lens/public/editor_frame_service/embeddable/index.ts @@ -3,12 +3,5 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -import { CoreStart } from 'kibana/public'; -let npStart: CoreStart; - -export function registerStartSingleton(start: CoreStart) { - npStart = start; -} - -export { npStart }; +export * from './embeddable'; diff --git a/x-pack/plugins/lens/public/editor_frame_service/service.tsx b/x-pack/plugins/lens/public/editor_frame_service/service.tsx index 8892217f5d51d..e6d7f78f5ad07 100644 --- a/x-pack/plugins/lens/public/editor_frame_service/service.tsx +++ b/x-pack/plugins/lens/public/editor_frame_service/service.tsx @@ -25,10 +25,8 @@ import { Document } from '../persistence/saved_object_store'; import { mergeTables } from './merge_tables'; import { formatColumn } from './format_column'; import { EmbeddableFactory, LensEmbeddableStartServices } from './embeddable/embeddable_factory'; -import { getActiveDatasourceIdFromDoc } from './editor_frame/state_management'; import { UiActionsStart } from '../../../../../src/plugins/ui_actions/public'; import { DashboardStart } from '../../../../../src/plugins/dashboard/public'; -import { persistedStateToExpression } from './editor_frame/state_helpers'; import { LensAttributeService } from '../lens_attribute_service'; export interface EditorFrameSetupPlugins { @@ -77,6 +75,8 @@ export class EditorFrameService { collectAsyncDefinitions(this.visualizations), ]); + const { persistedStateToExpression } = await import('../async_services'); + return await persistedStateToExpression(resolvedDatasources, resolvedVisualizations, doc); } @@ -133,7 +133,7 @@ export class EditorFrameService { const firstDatasourceId = Object.keys(resolvedDatasources)[0]; const firstVisualizationId = Object.keys(resolvedVisualizations)[0]; - const { EditorFrame } = await import('../async_services'); + const { EditorFrame, getActiveDatasourceIdFromDoc } = await import('../async_services'); render( diff --git a/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/advanced_editor.tsx b/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/advanced_editor.tsx index a6756df403ba7..16b861ae034fa 100644 --- a/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/advanced_editor.tsx +++ b/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/advanced_editor.tsx @@ -132,11 +132,11 @@ export const RangePopover = ({ { const newRange = { ...tempRange, - to: target.value !== '' ? Number(target.value) : -Infinity, + to: target.value !== '' ? 
Number(target.value) : Infinity, }; setTempRange(newRange); saveRangeAndReset(newRange); diff --git a/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.test.tsx b/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.test.tsx index 2409406afcdbc..fb6cf6df8573f 100644 --- a/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.test.tsx +++ b/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.test.tsx @@ -485,7 +485,7 @@ describe('ranges', () => { /> ); - // This series of act clojures are made to make it work properly the update flush + // This series of act closures is needed to make the update flush work properly act(() => { instance.find(RangePopover).find(EuiLink).prop('onClick')!({} as ReactMouseEvent); }); @@ -550,6 +550,46 @@ describe('ranges', () => { expect(instance.find(RangePopover)).toHaveLength(1); }); }); + + it('should correctly handle open ranges when saved', () => { + const setStateSpy = jest.fn(); + + // Add an extra open range: + (state.layers.first.columns.col1 as RangeIndexPatternColumn).params.ranges.push({ + from: null, + to: null, + label: '', + }); + + const instance = mount( + + ); + + act(() => { + instance.find(RangePopover).last().find(EuiLink).prop('onClick')!({} as ReactMouseEvent); + }); + + act(() => { + // need another wrapping for this in order to work + instance.update(); + + // Check UI values for open ranges + expect( + instance.find(RangePopover).last().find(EuiFieldNumber).first().prop('value') + ).toBe(''); + + expect(instance.find(RangePopover).last().find(EuiFieldNumber).last().prop('value')).toBe( + '' + ); + }); + }); }); }); }); diff --git a/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.tsx b/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.tsx index a59780ef59939..a8304456262eb 100644 --- a/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.tsx +++ b/x-pack/plugins/lens/public/indexpattern_datasource/operations/definitions/ranges/ranges.tsx @@ -16,7 +16,13 @@ import { updateColumnParam, changeColumn } from '../../../state_helpers'; import { MODES, AUTO_BARS, DEFAULT_INTERVAL, MIN_HISTOGRAM_BARS, SLICES } from './constants'; type RangeType = Omit; -export type RangeTypeLens = RangeType & { label: string }; +// Try to cover all possible serialized states for ranges +export type RangeTypeLens = (RangeType | { from: Range['from'] | null; to: Range['to'] | null }) & { + label: string; +}; + +// This is a subset of RangeTypeLens which has both from and to defined +type FullRangeTypeLens = Extract>; export type MODES_TYPES = typeof MODES[keyof typeof MODES]; @@ -35,10 +41,13 @@ export type UpdateParamsFnType = ( value: RangeColumnParams[K] ) => void; -export const isValidNumber = (value: number | '') => - value !== '' && !isNaN(value) && isFinite(value); -export const isRangeWithin = (range: RangeTypeLens): boolean => range.from <= range.to; -const isFullRange = ({ from, to }: RangeType) => isValidNumber(from) && isValidNumber(to); +// on initialization values can be null (from the Infinity serialization), so handle it correctly +// or they will be cast to 0 by the editor (see #78867) +export const isValidNumber = (value: number | '' | null): value is number => + value != null && value !== '' && !isNaN(value) && isFinite(value); +export const isRangeWithin = (range: RangeType): boolean =>
range.from <= range.to; +const isFullRange = (range: RangeTypeLens): range is FullRangeTypeLens => + isValidNumber(range.from) && isValidNumber(range.to); export const isValidRange = (range: RangeTypeLens): boolean => { if (isFullRange(range)) { return isRangeWithin(range); diff --git a/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx b/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx index 5fc89d831a961..405491ddc372a 100644 --- a/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx +++ b/x-pack/plugins/lens/public/xy_visualization/expression.test.tsx @@ -751,7 +751,7 @@ describe('xy_expression', () => { }); test('onElementClick returns correct context data', () => { - const geometry: GeometryValue = { x: 5, y: 1, accessor: 'y1', mark: null }; + const geometry: GeometryValue = { x: 5, y: 1, accessor: 'y1', mark: null, datum: {} }; const series = { key: 'spec{d}yAccessor{d}splitAccessors{b-2}', specId: 'd', diff --git a/x-pack/plugins/lens/server/routes/existing_fields.test.ts b/x-pack/plugins/lens/server/routes/existing_fields.test.ts index 9799dcf92ae41..c877e69d7b0dd 100644 --- a/x-pack/plugins/lens/server/routes/existing_fields.test.ts +++ b/x-pack/plugins/lens/server/routes/existing_fields.test.ts @@ -14,99 +14,55 @@ describe('existingFields', () => { return { name, isScript: false, - isAlias: false, isMeta: false, - path: name.split('.'), ...obj, }; } - function indexPattern(_source: unknown, fields: unknown = {}) { - return { _source, fields }; + function searchResults(fields: Record = {}) { + return { fields }; } it('should handle root level fields', () => { const result = existingFields( - [indexPattern({ foo: 'bar' }), indexPattern({ baz: 0 })], + [searchResults({ foo: ['bar'] }), searchResults({ baz: [0] })], [field('foo'), field('bar'), field('baz')] ); expect(result).toEqual(['foo', 'baz']); }); - it('should handle arrays of objects', () => { + it('should handle basic arrays, ignoring empty ones', () => { const result = existingFields( - [indexPattern({ stuff: [{ foo: 'bar' }, { baz: 0 }] })], - [field('stuff.foo'), field('stuff.bar'), field('stuff.baz')] + [searchResults({ stuff: ['heyo', 'there'], empty: [] })], + [field('stuff'), field('empty')] ); - expect(result).toEqual(['stuff.foo', 'stuff.baz']); - }); - - it('should handle basic arrays', () => { - const result = existingFields([indexPattern({ stuff: ['heyo', 'there'] })], [field('stuff')]); - expect(result).toEqual(['stuff']); }); - it('should handle deep object structures', () => { - const result = existingFields( - [indexPattern({ geo: { coordinates: { lat: 40, lon: -77 } } })], - [field('geo.coordinates')] - ); - - expect(result).toEqual(['geo.coordinates']); - }); - it('should handle objects with dotted fields', () => { const result = existingFields( - [indexPattern({ 'geo.country_name': 'US' })], + [searchResults({ 'geo.country_name': ['US'] })], [field('geo.country_name')] ); expect(result).toEqual(['geo.country_name']); }); - it('should handle arrays with dotted fields on both sides', () => { - const result = existingFields( - [indexPattern({ 'process.cpu': [{ 'user.pct': 50 }] })], - [field('process.cpu.user.pct')] - ); - - expect(result).toEqual(['process.cpu.user.pct']); - }); - - it('should be false if it hits a positive leaf before the end of the path', () => { - const result = existingFields( - [indexPattern({ geo: { coordinates: 32 } })], - [field('geo.coordinates.lat')] - ); - - expect(result).toEqual([]); - }); - - it('should use path, not name', () => { - const 
result = existingFields( - [indexPattern({ stuff: [{ foo: 'bar' }, { baz: 0 }] })], - [field({ name: 'goober', path: ['stuff', 'foo'] })] - ); - - expect(result).toEqual(['goober']); - }); - it('supports scripted fields', () => { const result = existingFields( - [indexPattern({}, { bar: 'scriptvalue' })], - [field({ name: 'baz', isScript: true, path: ['bar'] })] + [searchResults({ bar: ['scriptvalue'] })], + [field({ name: 'bar', isScript: true })] ); - expect(result).toEqual(['baz']); + expect(result).toEqual(['bar']); }); it('supports meta fields', () => { const result = existingFields( - [{ _mymeta: 'abc', ...indexPattern({}, { bar: 'scriptvalue' }) }], - [field({ name: '_mymeta', isMeta: true, path: ['_mymeta'] })] + [{ _mymeta: 'abc', ...searchResults({ bar: ['scriptvalue'] }) }], + [field({ name: '_mymeta', isMeta: true })] ); expect(result).toEqual(['_mymeta']); @@ -132,81 +88,22 @@ describe('buildFieldList', () => { references: [], }; - const mappings = { - testpattern: { - mappings: { - properties: { - '@bar': { - type: 'alias', - path: 'bar', - }, - }, - }, - }, - }; - - const fieldDescriptors = [ - { - name: 'baz', - subType: { multi: { parent: 'a.b.c' } }, - }, - ]; - - it('uses field descriptors to determine the path', () => { - const fields = buildFieldList(indexPattern, mappings, fieldDescriptors, []); - expect(fields.find((f) => f.name === 'baz')).toMatchObject({ - isAlias: false, - isScript: false, - name: 'baz', - path: ['a', 'b', 'c'], - }); - }); - - it('uses aliases to determine the path', () => { - const fields = buildFieldList(indexPattern, mappings, fieldDescriptors, []); - expect(fields.find((f) => f.isAlias)).toMatchObject({ - isAlias: true, - isScript: false, - name: '@bar', - path: ['bar'], - }); - }); - it('supports scripted fields', () => { - const fields = buildFieldList(indexPattern, mappings, fieldDescriptors, []); + const fields = buildFieldList(indexPattern, []); expect(fields.find((f) => f.isScript)).toMatchObject({ - isAlias: false, isScript: true, name: 'foo', - path: ['foo'], lang: 'painless', script: '2+2', }); }); it('supports meta fields', () => { - const fields = buildFieldList(indexPattern, mappings, fieldDescriptors, ['_mymeta']); + const fields = buildFieldList(indexPattern, ['_mymeta']); expect(fields.find((f) => f.isMeta)).toMatchObject({ - isAlias: false, isScript: false, isMeta: true, name: '_mymeta', - path: ['_mymeta'], - }); - }); - - it('handles missing mappings', () => { - const fields = buildFieldList(indexPattern, {}, fieldDescriptors, []); - expect(fields.every((f) => f.isAlias === false)).toEqual(true); - }); - - it('handles empty fieldDescriptors by skipping multi-mappings', () => { - const fields = buildFieldList(indexPattern, mappings, [], []); - expect(fields.find((f) => f.name === 'baz')).toMatchObject({ - isAlias: false, - isScript: false, - name: 'baz', - path: ['baz'], }); }); }); diff --git a/x-pack/plugins/lens/server/routes/existing_fields.ts b/x-pack/plugins/lens/server/routes/existing_fields.ts index 33fcafacfad73..c925517b572da 100644 --- a/x-pack/plugins/lens/server/routes/existing_fields.ts +++ b/x-pack/plugins/lens/server/routes/existing_fields.ts @@ -9,36 +9,17 @@ import { schema } from '@kbn/config-schema'; import { ILegacyScopedClusterClient, SavedObject, RequestHandlerContext } from 'src/core/server'; import { CoreSetup } from 'src/core/server'; import { BASE_API_URL } from '../../common'; -import { - IndexPatternsFetcher, - IndexPatternAttributes, - UI_SETTINGS, -} from 
'../../../../../src/plugins/data/server'; +import { IndexPatternAttributes, UI_SETTINGS } from '../../../../../src/plugins/data/server'; /** * The number of docs to sample to determine field empty status. */ const SAMPLE_SIZE = 500; -interface MappingResult { - [indexPatternTitle: string]: { - mappings: { - properties: Record; - }; - }; -} - -interface FieldDescriptor { - name: string; - subType?: { multi?: { parent?: string } }; -} - export interface Field { name: string; isScript: boolean; - isAlias: boolean; isMeta: boolean; - path: string[]; lang?: string; script?: string; } @@ -105,14 +86,12 @@ async function fetchFieldExistence({ timeFieldName?: string; }) { const metaFields: string[] = await context.core.uiSettings.client.get(UI_SETTINGS.META_FIELDS); - const { - indexPattern, - indexPatternTitle, - mappings, - fieldDescriptors, - } = await fetchIndexPatternDefinition(indexPatternId, context, metaFields); + const { indexPattern, indexPatternTitle } = await fetchIndexPatternDefinition( + indexPatternId, + context + ); - const fields = buildFieldList(indexPattern, mappings, fieldDescriptors, metaFields); + const fields = buildFieldList(indexPattern, metaFields); const docs = await fetchIndexPatternStats({ fromDate, toDate, @@ -129,51 +108,17 @@ async function fetchFieldExistence({ }; } -async function fetchIndexPatternDefinition( - indexPatternId: string, - context: RequestHandlerContext, - metaFields: string[] -) { +async function fetchIndexPatternDefinition(indexPatternId: string, context: RequestHandlerContext) { const savedObjectsClient = context.core.savedObjects.client; - const requestClient = context.core.elasticsearch.legacy.client; const indexPattern = await savedObjectsClient.get( 'index-pattern', indexPatternId ); const indexPatternTitle = indexPattern.attributes.title; - if (indexPatternTitle.includes(':')) { - // Cross cluster search patterns include a colon, and we aren't able to fetch - // mapping information. - return { - indexPattern, - indexPatternTitle, - mappings: {}, - fieldDescriptors: [], - }; - } - - // TODO: maybe don't use IndexPatternsFetcher at all, since we're only using it - // to look up field values in the resulting documents. We can accomplish the same - // using the mappings which we're also fetching here. - const indexPatternsFetcher = new IndexPatternsFetcher(requestClient.callAsCurrentUser); - const [mappings, fieldDescriptors] = await Promise.all([ - requestClient.callAsCurrentUser('indices.getMapping', { - index: indexPatternTitle, - }), - - indexPatternsFetcher.getFieldsForWildcard({ - pattern: indexPatternTitle, - // TODO: Pull this from kibana advanced settings - metaFields, - }), - ]); - return { indexPattern, indexPatternTitle, - mappings, - fieldDescriptors, }; } @@ -182,32 +127,13 @@ async function fetchIndexPatternDefinition( */ export function buildFieldList( indexPattern: SavedObject, - mappings: MappingResult | {}, - fieldDescriptors: FieldDescriptor[], metaFields: string[] ): Field[] { - const aliasMap = Object.entries(Object.values(mappings)[0]?.mappings.properties ?? 
{}) - .map(([name, v]) => ({ ...v, name })) - .filter((f) => f.type === 'alias') - .reduce((acc, f) => { - acc[f.name] = f.path; - return acc; - }, {} as Record); - - const descriptorMap = fieldDescriptors.reduce((acc, f) => { - acc[f.name] = f; - return acc; - }, {} as Record); - return JSON.parse(indexPattern.attributes.fields).map( (field: { name: string; lang: string; scripted?: boolean; script?: string }) => { - const path = - aliasMap[field.name] || descriptorMap[field.name]?.subType?.multi?.parent || field.name; return { name: field.name, isScript: !!field.scripted, - isAlias: !!aliasMap[field.name], - path: path.split('.'), lang: field.lang, script: field.script, // id is a special case - it doesn't show up in the meta field list, @@ -263,8 +189,8 @@ async function fetchIndexPatternStats({ size: SAMPLE_SIZE, query, sort: timeFieldName && fromDate && toDate ? [{ [timeFieldName]: 'desc' }] : [], - // _source is required because we are also providing script fields. - _source: '*', + fields: ['*'], + _source: false, script_fields: scriptedFields.reduce((acc, field) => { acc[field.name] = { script: { @@ -279,49 +205,11 @@ async function fetchIndexPatternStats({ return result.hits.hits; } -// Recursive function to determine if the _source of a document -// contains a known path. -function exists(obj: unknown, path: string[], i = 0): boolean { - if (obj == null) { - return false; - } - - if (path.length === i) { - return true; - } - - if (Array.isArray(obj)) { - return obj.some((child) => exists(child, path, i)); - } - - if (typeof obj === 'object') { - // Because Elasticsearch flattens paths, dots in the field name are allowed - // as JSON keys. For example, { 'a.b': 10 } - const partialKeyMatches = Object.getOwnPropertyNames(obj) - .map((key) => key.split('.')) - .filter((keyPaths) => keyPaths.every((key, keyIndex) => key === path[keyIndex + i])); - - if (partialKeyMatches.length) { - return partialKeyMatches.every((keyPaths) => { - return exists( - (obj as Record)[keyPaths.join('.')], - path, - i + keyPaths.length - ); - }); - } - - return exists((obj as Record)[path[i]], path, i + 1); - } - - return path.length === i; -} - /** * Exported only for unit tests. 
*/ export function existingFields( - docs: Array<{ _source: unknown; fields: unknown; [key: string]: unknown }>, + docs: Array<{ fields: Record; [key: string]: unknown }>, fields: Field[] ): string[] { const missingFields = new Set(fields); @@ -332,14 +220,14 @@ export function existingFields( } missingFields.forEach((field) => { - let fieldStore = doc._source; - if (field.isScript) { - fieldStore = doc.fields; - } + let fieldStore: Record = doc.fields; if (field.isMeta) { fieldStore = doc; } - if (exists(fieldStore, field.path)) { + const value = fieldStore[field.name]; + if (Array.isArray(value) && value.length) { + missingFields.delete(field); + } else if (!Array.isArray(value) && value) { missingFields.delete(field); } }); diff --git a/x-pack/plugins/lists/README.md b/x-pack/plugins/lists/README.md index dac6e8bb78fa5..02be757303417 100644 --- a/x-pack/plugins/lists/README.md +++ b/x-pack/plugins/lists/README.md @@ -113,12 +113,6 @@ You should see the new exception list created like so: ```sh { - "_tags": [ - "endpoint", - "process", - "malware", - "os:linux" - ], "created_at": "2020-05-28T19:16:31.052Z", "created_by": "yo", "description": "This is a sample endpoint type exception", @@ -141,12 +135,6 @@ And you can attach exception list items like so: ```ts { - "_tags": [ - "endpoint", - "process", - "malware", - "os:linux" - ], "comments": [], "created_at": "2020-05-28T19:17:21.099Z", "created_by": "yo", @@ -173,6 +161,7 @@ And you can attach exception list items like so: "list_id": "endpoint_list", "name": "Sample Endpoint Exception List", "namespace_type": "single", + "os_types": ["linux"], "tags": [ "user added string for a tag", "malware" @@ -222,12 +211,6 @@ or for finding exception lists: { "data": [ { - "_tags": [ - "endpoint", - "process", - "malware", - "os:linux" - ], "created_at": "2020-05-28T19:16:31.052Z", "created_by": "yo", "description": "This is a sample endpoint type exception", @@ -235,6 +218,7 @@ or for finding exception lists: "list_id": "endpoint_list", "name": "Sample Endpoint Exception List", "namespace_type": "single", + "os_types": ["linux"], "tags": [ "user added string for a tag", "malware" diff --git a/x-pack/plugins/lists/common/constants.mock.ts b/x-pack/plugins/lists/common/constants.mock.ts index 46ed524ff33e3..c712af83dd9b1 100644 --- a/x-pack/plugins/lists/common/constants.mock.ts +++ b/x-pack/plugins/lists/common/constants.mock.ts @@ -5,6 +5,7 @@ */ import moment from 'moment'; +import { OsTypeArray } from './schemas/common'; import { EntriesArray } from './schemas/types'; import { EndpointEntriesArray } from './schemas/types/endpoint'; export const DATE_NOW = '2020-04-20T15:25:31.830Z'; @@ -68,7 +69,7 @@ export const ENDPOINT_ENTRIES: EndpointEntriesArray = [ { field: 'some.not.nested.field', operator: 'included', type: 'match', value: 'some value' }, ]; export const ITEM_TYPE = 'simple'; -export const _TAGS = []; +export const OS_TYPES: OsTypeArray = ['windows']; export const TAGS = []; export const COMMENTS = []; export const FILTER = 'name:Nicolas Bourbaki'; diff --git a/x-pack/plugins/lists/common/schemas/common/schemas.test.ts b/x-pack/plugins/lists/common/schemas/common/schemas.test.ts index ec3871b673888..04bdf037c556e 100644 --- a/x-pack/plugins/lists/common/schemas/common/schemas.test.ts +++ b/x-pack/plugins/lists/common/schemas/common/schemas.test.ts @@ -27,6 +27,8 @@ import { esDataTypeUnion, exceptionListType, operator, + osType, + osTypeArrayOrUndefined, type, } from './schemas'; @@ -379,4 +381,35 @@ describe('Common schemas', () => { 
expect(message.schema).toEqual({}); }); }); + + describe('osType', () => { + test('it will validate a correct osType', () => { + const payload = 'windows'; + const decoded = osType.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it will fail to validate an incorrect osType', () => { + const payload = 'foo'; + const decoded = osType.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "foo" supplied to ""linux" | "macos" | "windows""', + ]); + expect(message.schema).toEqual({}); + }); + + test('it will default to an empty array when osTypeArrayOrUndefined is used', () => { + const payload = undefined; + const decoded = osTypeArrayOrUndefined.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual([]); + }); + }); }); diff --git a/x-pack/plugins/lists/common/schemas/common/schemas.ts b/x-pack/plugins/lists/common/schemas/common/schemas.ts index 37da5fbcd1a1b..7497b81fbe91c 100644 --- a/x-pack/plugins/lists/common/schemas/common/schemas.ts +++ b/x-pack/plugins/lists/common/schemas/common/schemas.ts @@ -9,7 +9,7 @@ import * as t from 'io-ts'; import { DefaultNamespace } from '../types/default_namespace'; -import { DefaultStringArray, NonEmptyString } from '../../shared_imports'; +import { DefaultArray, DefaultStringArray, NonEmptyString } from '../../shared_imports'; export const name = t.string; export type Name = t.TypeOf; @@ -211,11 +211,6 @@ export type Tags = t.TypeOf; export const tagsOrUndefined = t.union([tags, t.undefined]); export type TagsOrUndefined = t.TypeOf; -export const _tags = DefaultStringArray; -export type _Tags = t.TypeOf; -export const _tagsOrUndefined = t.union([_tags, t.undefined]); -export type _TagsOrUndefined = t.TypeOf; - export const exceptionListType = t.keyof({ detection: null, endpoint: null }); export const exceptionListTypeOrUndefined = t.union([exceptionListType, t.undefined]); export type ExceptionListType = t.TypeOf; @@ -317,3 +312,16 @@ export type Immutable = t.TypeOf; export const immutableOrUndefined = t.union([immutable, t.undefined]); export type ImmutableOrUndefined = t.TypeOf; + +export const osType = t.keyof({ + linux: null, + macos: null, + windows: null, +}); +export type OsType = t.TypeOf; + +export const osTypeArray = DefaultArray(osType); +export type OsTypeArray = t.TypeOf; + +export const osTypeArrayOrUndefined = t.union([osTypeArray, t.undefined]); +export type OsTypeArrayOrUndefined = t.OutputOf; diff --git a/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.mock.ts b/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.mock.ts index 529e173618f15..f292b7c5bc945 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.mock.ts @@ -11,20 +11,20 @@ import { ITEM_TYPE, META, NAME, + OS_TYPES, TAGS, - _TAGS, } from '../../constants.mock'; import { CreateEndpointListItemSchema } from './create_endpoint_list_item_schema'; export const getCreateEndpointListItemSchemaMock = (): CreateEndpointListItemSchema => ({ - _tags: _TAGS, 
comments: COMMENTS, description: DESCRIPTION, entries: ENDPOINT_ENTRIES, item_id: undefined, meta: META, name: NAME, + os_types: OS_TYPES, tags: TAGS, type: ITEM_TYPE, }); diff --git a/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.test.ts b/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.test.ts index 624de2fb30d17..afb0454a79667 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.test.ts @@ -174,19 +174,6 @@ describe('create_endpoint_list_item_schema', () => { expect(message.schema).toEqual(outputPayload); }); - test('it should pass validation when supplied an undefined for "_tags" but return an array and generate a correct body not counting the auto generated uuid', () => { - const inputPayload = getCreateEndpointListItemSchemaMock(); - const outputPayload = getCreateEndpointListItemSchemaMock(); - delete inputPayload._tags; - outputPayload._tags = []; - const decoded = createEndpointListItemSchema.decode(inputPayload); - const checked = exactCheck(inputPayload, decoded); - const message = pipe(checked, foldLeftRight); - delete (message.schema as CreateEndpointListItemSchema).item_id; - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(outputPayload); - }); - test('it should pass validation when supplied an undefined for "item_id" and auto generate a uuid', () => { const inputPayload = getCreateEndpointListItemSchemaMock(); delete inputPayload.item_id; diff --git a/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.ts b/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.ts index d1fc167f5a92b..611d9a83befc7 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_endpoint_list_item_schema.ts @@ -8,13 +8,13 @@ import * as t from 'io-ts'; import { ItemId, + OsTypeArray, Tags, - _Tags, - _tags, description, exceptionListItemType, meta, name, + osTypeArrayOrUndefined, tags, } from '../common/schemas'; import { RequiredKeepUndefined } from '../../types'; @@ -34,10 +34,10 @@ export const createEndpointListItemSchema = t.intersection([ ), t.exact( t.partial({ - _tags, // defaults to empty array if not set during decode comments: DefaultCreateCommentsArray, // defaults to empty array if not set during decode item_id: DefaultUuid, // defaults to GUID (uuid v4) if not set during decode meta, // defaults to undefined if not set during decode + os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode tags, // defaults to empty array if not set during decode }) ), @@ -48,11 +48,11 @@ export type CreateEndpointListItemSchema = t.OutputOf>, - '_tags' | 'tags' | 'item_id' | 'entries' | 'comments' + 'tags' | 'item_id' | 'entries' | 'comments' | 'os_types' > & { - _tags: _Tags; comments: CreateCommentsArray; tags: Tags; item_id: ItemId; entries: EntriesArray; + os_types: OsTypeArray; }; diff --git a/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.mock.ts b/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.mock.ts index da22e33dc7b52..9a55e88a7a8fa 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.mock.ts @@ -14,14 +14,13 @@ 
import { META, NAME, NAMESPACE_TYPE, + OS_TYPES, TAGS, - _TAGS, } from '../../constants.mock'; import { CreateExceptionListItemSchema } from './create_exception_list_item_schema'; export const getCreateExceptionListItemSchemaMock = (): CreateExceptionListItemSchema => ({ - _tags: _TAGS, comments: COMMENTS, description: DESCRIPTION, entries: ENTRIES, @@ -30,6 +29,7 @@ export const getCreateExceptionListItemSchemaMock = (): CreateExceptionListItemS meta: META, name: NAME, namespace_type: NAMESPACE_TYPE, + os_types: OS_TYPES, tags: TAGS, type: ITEM_TYPE, }); @@ -43,6 +43,7 @@ export const getCreateExceptionListItemMinimalSchemaMock = (): CreateExceptionLi item_id: ITEM_ID, list_id: LIST_ID, name: NAME, + os_types: OS_TYPES, type: ITEM_TYPE, }); @@ -54,5 +55,6 @@ export const getCreateExceptionListItemMinimalSchemaMockWithoutId = (): CreateEx entries: ENTRIES, list_id: LIST_ID, name: NAME, + os_types: OS_TYPES, type: ITEM_TYPE, }); diff --git a/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.test.ts b/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.test.ts index 4a4c3972dc1e3..e83b2e3010785 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.test.ts @@ -176,19 +176,6 @@ describe('create_exception_list_item_schema', () => { expect(message.schema).toEqual(outputPayload); }); - test('it should pass validation when supplied an undefined for "_tags" but return an array and generate a correct body not counting the auto generated uuid', () => { - const inputPayload = getCreateExceptionListItemSchemaMock(); - const outputPayload = getCreateExceptionListItemSchemaMock(); - delete inputPayload._tags; - outputPayload._tags = []; - const decoded = createExceptionListItemSchema.decode(inputPayload); - const checked = exactCheck(inputPayload, decoded); - const message = pipe(checked, foldLeftRight); - delete (message.schema as CreateExceptionListItemSchema).item_id; - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(outputPayload); - }); - test('it should pass validation when supplied an undefined for "item_id" and auto generate a uuid', () => { const inputPayload = getCreateExceptionListItemSchemaMock(); delete inputPayload.item_id; diff --git a/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.ts b/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.ts index fd3390721d41e..642a6c549e7fa 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_exception_list_item_schema.ts @@ -8,15 +8,15 @@ import * as t from 'io-ts'; import { ItemId, + OsTypeArray, Tags, - _Tags, - _tags, description, exceptionListItemType, list_id, meta, name, namespace_type, + osTypeArrayOrUndefined, tags, } from '../common/schemas'; import { RequiredKeepUndefined } from '../../types'; @@ -41,11 +41,11 @@ export const createExceptionListItemSchema = t.intersection([ ), t.exact( t.partial({ - _tags, // defaults to empty array if not set during decode comments: DefaultCreateCommentsArray, // defaults to empty array if not set during decode item_id: DefaultUuid, // defaults to GUID (uuid v4) if not set during decode meta, // defaults to undefined if not set during decode namespace_type, // defaults to 'single' if not set during decode + os_types: osTypeArrayOrUndefined, 
// defaults to empty array if not set during decode tags, // defaults to empty array if not set during decode }) ), @@ -56,12 +56,12 @@ export type CreateExceptionListItemSchema = t.OutputOf>, - '_tags' | 'tags' | 'item_id' | 'entries' | 'namespace_type' | 'comments' + 'tags' | 'item_id' | 'entries' | 'namespace_type' | 'comments' > & { - _tags: _Tags; comments: CreateCommentsArray; tags: Tags; item_id: ItemId; entries: EntriesArray; namespace_type: NamespaceType; + os_types: OsTypeArray; }; diff --git a/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.mock.ts b/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.mock.ts index f8431fcce1bf7..3150cb9975f21 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.mock.ts @@ -17,12 +17,12 @@ import { import { CreateExceptionListSchema } from './create_exception_list_schema'; export const getCreateExceptionListSchemaMock = (): CreateExceptionListSchema => ({ - _tags: [], description: DESCRIPTION, list_id: undefined, meta: META, name: NAME, namespace_type: NAMESPACE_TYPE, + os_types: [], tags: [], type: ENDPOINT_TYPE, version: VERSION, diff --git a/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.test.ts b/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.test.ts index c9e2aa37a132b..6bcd3bc15a975 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.test.ts @@ -50,19 +50,6 @@ describe('create_exception_list_schema', () => { expect(message.schema).toEqual(outputPayload); }); - test('it should accept an undefined for "_tags" but return an array and generate a correct body not counting the uuid', () => { - const inputPayload = getCreateExceptionListSchemaMock(); - const outputPayload = getCreateExceptionListSchemaMock(); - delete inputPayload._tags; - outputPayload._tags = []; - const decoded = createExceptionListSchema.decode(inputPayload); - const checked = exactCheck(inputPayload, decoded); - const message = pipe(checked, foldLeftRight); - delete (message.schema as CreateExceptionListSchema).list_id; - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(outputPayload); - }); - test('it should accept an undefined for "list_id" and auto generate a uuid', () => { const inputPayload = getCreateExceptionListSchemaMock(); delete inputPayload.list_id; diff --git a/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.ts b/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.ts index ffec974602714..4eae11081454c 100644 --- a/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.ts +++ b/x-pack/plugins/lists/common/schemas/request/create_exception_list_schema.ts @@ -8,14 +8,14 @@ import * as t from 'io-ts'; import { ListId, + OsTypeArray, Tags, - _Tags, - _tags, description, exceptionListType, meta, name, namespace_type, + osTypeArrayOrUndefined, tags, } from '../common/schemas'; import { RequiredKeepUndefined } from '../../types'; @@ -36,10 +36,10 @@ export const createExceptionListSchema = t.intersection([ ), t.exact( t.partial({ - _tags, // defaults to empty array if not set during decode list_id: DefaultUuid, // defaults to a GUID (UUID v4) string if not set during decode meta, // defaults to undefined if not set during decode 
namespace_type, // defaults to 'single' if not set during decode + os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode tags, // defaults to empty array if not set during decode version: DefaultVersionNumber, // defaults to numerical 1 if not set during decode }) @@ -51,11 +51,11 @@ export type CreateExceptionListSchema = t.OutputOf>, - '_tags' | 'tags' | 'list_id' | 'namespace_type' + 'tags' | 'list_id' | 'namespace_type' | 'os_types' > & { - _tags: _Tags; tags: Tags; list_id: ListId; namespace_type: NamespaceType; + os_types: OsTypeArray; version: DefaultVersionNumberDecoded; }; diff --git a/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.mock.ts b/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.mock.ts index 0847389dac922..8c999332e8893 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.mock.ts @@ -13,14 +13,13 @@ import { LIST_ITEM_ID, META, NAME, + OS_TYPES, TAGS, - _TAGS, } from '../../constants.mock'; import { UpdateEndpointListItemSchema } from './update_endpoint_list_item_schema'; export const getUpdateEndpointListItemSchemaMock = (): UpdateEndpointListItemSchema => ({ - _tags: _TAGS, _version: undefined, comments: COMMENTS, description: DESCRIPTION, @@ -29,6 +28,7 @@ export const getUpdateEndpointListItemSchemaMock = (): UpdateEndpointListItemSch item_id: LIST_ITEM_ID, meta: META, name: NAME, + os_types: OS_TYPES, tags: TAGS, type: ITEM_TYPE, }); diff --git a/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.test.ts b/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.test.ts index 671e38ceb5266..c7be8b78d54a6 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.test.ts @@ -127,18 +127,6 @@ describe('update_endpoint_list_item_schema', () => { expect(message.schema).toEqual(outputPayload); }); - test('it should accept an undefined for "_tags" but return an array', () => { - const inputPayload = getUpdateEndpointListItemSchemaMock(); - const outputPayload = getUpdateEndpointListItemSchemaMock(); - delete inputPayload._tags; - outputPayload._tags = []; - const decoded = updateEndpointListItemSchema.decode(inputPayload); - const checked = exactCheck(inputPayload, decoded); - const message = pipe(checked, foldLeftRight); - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(outputPayload); - }); - test('it should not allow an extra key to be sent in', () => { const payload: UpdateEndpointListItemSchema & { extraKey?: string; diff --git a/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.ts b/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.ts index 6ce5ad7858b78..f6ced91cd4010 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_endpoint_list_item_schema.ts @@ -7,15 +7,15 @@ import * as t from 'io-ts'; import { + OsTypeArray, Tags, - _Tags, - _tags, _version, description, exceptionListItemType, id, meta, name, + osTypeArrayOrUndefined, tags, } from '../common/schemas'; import { RequiredKeepUndefined } from '../../types'; @@ -37,12 +37,12 @@ export const updateEndpointListItemSchema = t.intersection([ ), 
t.exact( t.partial({ - _tags, // defaults to empty array if not set during decode _version, // defaults to undefined if not set during decode comments: DefaultUpdateCommentsArray, // defaults to empty array if not set during decode id, // defaults to undefined if not set during decode item_id: t.union([t.string, t.undefined]), meta, // defaults to undefined if not set during decode + os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode tags, // defaults to empty array if not set during decode }) ), @@ -53,10 +53,10 @@ export type UpdateEndpointListItemSchema = t.OutputOf>, - '_tags' | 'tags' | 'entries' | 'comments' + 'tags' | 'entries' | 'comments' > & { - _tags: _Tags; comments: UpdateCommentsArray; tags: Tags; entries: EntriesArray; + os_types: OsTypeArray; }; diff --git a/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.mock.ts b/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.mock.ts index 4673c0fe7629d..e65b37b48545e 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.mock.ts @@ -15,14 +15,13 @@ import { META, NAME, NAMESPACE_TYPE, + OS_TYPES, TAGS, - _TAGS, } from '../../constants.mock'; import { UpdateExceptionListItemSchema } from './update_exception_list_item_schema'; export const getUpdateExceptionListItemSchemaMock = (): UpdateExceptionListItemSchema => ({ - _tags: _TAGS, _version: undefined, comments: COMMENTS, description: DESCRIPTION, @@ -32,6 +31,7 @@ export const getUpdateExceptionListItemSchemaMock = (): UpdateExceptionListItemS meta: META, name: NAME, namespace_type: NAMESPACE_TYPE, + os_types: ['linux'], tags: TAGS, type: ITEM_TYPE, }); @@ -45,5 +45,6 @@ export const getUpdateMinimalExceptionListItemSchemaMock = (): UpdateExceptionLi entries: ENTRIES, item_id: ITEM_ID, name: NAME, + os_types: OS_TYPES, type: ITEM_TYPE, }); diff --git a/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.test.ts b/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.test.ts index da320a4983de3..387c29ad2d190 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.test.ts @@ -139,18 +139,6 @@ describe('update_exception_list_item_schema', () => { expect(message.schema).toEqual(outputPayload); }); - test('it should accept an undefined for "_tags" but return an array', () => { - const inputPayload = getUpdateExceptionListItemSchemaMock(); - const outputPayload = getUpdateExceptionListItemSchemaMock(); - delete inputPayload._tags; - outputPayload._tags = []; - const decoded = updateExceptionListItemSchema.decode(inputPayload); - const checked = exactCheck(inputPayload, decoded); - const message = pipe(checked, foldLeftRight); - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(outputPayload); - }); - test('it should accept an undefined for "item_id" and generate a correct body not counting the uuid', () => { const inputPayload = getUpdateExceptionListItemSchemaMock(); delete inputPayload.item_id; diff --git a/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.ts b/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.ts index 659dde0b5b533..14cac2bb93fe0 100644 --- 
a/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_exception_list_item_schema.ts @@ -7,9 +7,8 @@ import * as t from 'io-ts'; import { + OsTypeArray, Tags, - _Tags, - _tags, _version, description, exceptionListItemType, @@ -17,6 +16,7 @@ import { meta, name, namespace_type, + osTypeArrayOrUndefined, tags, } from '../common/schemas'; import { RequiredKeepUndefined } from '../../types'; @@ -39,13 +39,13 @@ export const updateExceptionListItemSchema = t.intersection([ ), t.exact( t.partial({ - _tags, // defaults to empty array if not set during decode _version, // defaults to undefined if not set during decode comments: DefaultUpdateCommentsArray, // defaults to empty array if not set during decode id, // defaults to undefined if not set during decode item_id: t.union([t.string, t.undefined]), meta, // defaults to undefined if not set during decode namespace_type, // defaults to 'single' if not set during decode + os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode tags, // defaults to empty array if not set during decode }) ), @@ -56,11 +56,11 @@ export type UpdateExceptionListItemSchema = t.OutputOf>, - '_tags' | 'tags' | 'entries' | 'namespace_type' | 'comments' + 'tags' | 'entries' | 'namespace_type' | 'comments' | 'os_types' > & { - _tags: _Tags; comments: UpdateCommentsArray; tags: Tags; entries: EntriesArray; namespace_type: NamespaceType; + os_types: OsTypeArray; }; diff --git a/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.mock.ts b/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.mock.ts index b7dc2d9e0c948..fdefa6fe9b2c5 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.mock.ts @@ -4,12 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { DESCRIPTION, ID, LIST_ID, META, NAME, NAMESPACE_TYPE, _TAGS } from '../../constants.mock'; +import { DESCRIPTION, ID, LIST_ID, META, NAME, NAMESPACE_TYPE } from '../../constants.mock'; import { UpdateExceptionListSchema } from './update_exception_list_schema'; export const getUpdateExceptionListSchemaMock = (): UpdateExceptionListSchema => ({ - _tags: _TAGS, _version: undefined, description: DESCRIPTION, id: ID, @@ -17,6 +16,7 @@ export const getUpdateExceptionListSchemaMock = (): UpdateExceptionListSchema => meta: META, name: NAME, namespace_type: NAMESPACE_TYPE, + os_types: [], tags: ['malware'], type: 'endpoint', }); diff --git a/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.test.ts b/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.test.ts index 32f114ae34d8e..4afd1aa442aa7 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.test.ts @@ -100,18 +100,6 @@ describe('update_exception_list_schema', () => { expect(message.schema).toEqual(outputPayload); }); - test('it should accept an undefined for "_tags" but return an array', () => { - const inputPayload = getUpdateExceptionListSchemaMock(); - const outputPayload = getUpdateExceptionListSchemaMock(); - delete inputPayload._tags; - outputPayload._tags = []; - const decoded = updateExceptionListSchema.decode(inputPayload); - const checked = exactCheck(inputPayload, decoded); - const message = pipe(checked, foldLeftRight); - expect(getPaths(left(message.errors))).toEqual([]); - expect(message.schema).toEqual(outputPayload); - }); - test('it should accept an undefined for "list_id" and generate a correct body not counting the uuid', () => { const inputPayload = getUpdateExceptionListSchemaMock(); delete inputPayload.list_id; diff --git a/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.ts b/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.ts index 54e0bbafe4981..37ba21bcfc424 100644 --- a/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.ts +++ b/x-pack/plugins/lists/common/schemas/request/update_exception_list_schema.ts @@ -7,9 +7,8 @@ import * as t from 'io-ts'; import { + OsTypeArray, Tags, - _Tags, - _tags, _version, description, exceptionListType, @@ -18,6 +17,7 @@ import { meta, name, namespace_type, + osTypeArrayOrUndefined, tags, version, } from '../common/schemas'; @@ -34,12 +34,12 @@ export const updateExceptionListSchema = t.intersection([ ), t.exact( t.partial({ - _tags, // defaults to empty array if not set during decode _version, // defaults to undefined if not set during decode id, // defaults to undefined if not set during decode list_id, // defaults to undefined if not set during decode meta, // defaults to undefined if not set during decode namespace_type, // defaults to 'single' if not set during decode + os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode tags, // defaults to empty array if not set during decode version, // defaults to undefined if not set during decode }) @@ -51,9 +51,9 @@ export type UpdateExceptionListSchema = t.OutputOf>, - '_tags | tags | namespace_type' + 'tags | namespace_type' | 'os_types' > & { - _tags: _Tags; tags: Tags; namespace_type: NamespaceType; + os_types: OsTypeArray; }; diff --git a/x-pack/plugins/lists/common/schemas/response/create_endpoint_list_schema.test.ts 
b/x-pack/plugins/lists/common/schemas/response/create_endpoint_list_schema.test.ts index 380a1e3a4cfd5..ebae189ca7d06 100644 --- a/x-pack/plugins/lists/common/schemas/response/create_endpoint_list_schema.test.ts +++ b/x-pack/plugins/lists/common/schemas/response/create_endpoint_list_schema.test.ts @@ -42,7 +42,7 @@ describe('create_endpoint_list_schema', () => { const message = pipe(checked, foldLeftRight); expect(getPaths(left(message.errors))).toEqual([ - 'invalid keys "_tags,["endpoint","process","malware","os:linux"],_version,created_at,created_by,description,id,immutable,meta,{},name,namespace_type,tags,["user added string for a tag","malware"],tie_breaker_id,type,updated_at,updated_by,version"', + 'invalid keys "_version,created_at,created_by,description,id,immutable,meta,{},name,namespace_type,os_types,["linux"],tags,["user added string for a tag","malware"],tie_breaker_id,type,updated_at,updated_by,version"', ]); expect(message.schema).toEqual({}); }); diff --git a/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.mock.ts b/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.mock.ts index 1a8f21a5232f8..c2a751c03ee13 100644 --- a/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.mock.ts @@ -15,6 +15,7 @@ import { META, NAME, NAMESPACE_TYPE, + OS_TYPES, TIE_BREAKER, USER, } from '../../constants.mock'; @@ -22,7 +23,6 @@ import { import { ExceptionListItemSchema } from './exception_list_item_schema'; export const getExceptionListItemSchemaMock = (): ExceptionListItemSchema => ({ - _tags: ['endpoint', 'process', 'malware', 'os:linux'], _version: undefined, comments: COMMENTS, created_at: DATE_NOW, @@ -35,6 +35,7 @@ export const getExceptionListItemSchemaMock = (): ExceptionListItemSchema => ({ meta: META, name: NAME, namespace_type: NAMESPACE_TYPE, + os_types: ['linux'], tags: ['user added string for a tag', 'malware'], tie_breaker_id: TIE_BREAKER, type: ITEM_TYPE, @@ -49,7 +50,6 @@ export const getExceptionListItemSchemaMock = (): ExceptionListItemSchema => ({ export const getExceptionListItemResponseMockWithoutAutoGeneratedValues = (): Partial< ExceptionListItemSchema > => ({ - _tags: [], comments: [], created_by: ELASTIC_USER, description: DESCRIPTION, @@ -58,6 +58,7 @@ export const getExceptionListItemResponseMockWithoutAutoGeneratedValues = (): Pa list_id: LIST_ID, name: NAME, namespace_type: 'single', + os_types: OS_TYPES, tags: [], type: ITEM_TYPE, updated_by: ELASTIC_USER, diff --git a/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.ts b/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.ts index 65a1a26eaa622..f5ee12e098d17 100644 --- a/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.ts +++ b/x-pack/plugins/lists/common/schemas/response/exception_list_item_schema.ts @@ -7,7 +7,6 @@ import * as t from 'io-ts'; import { - _tags, _versionOrUndefined, created_at, created_by, @@ -19,6 +18,7 @@ import { metaOrUndefined, name, namespace_type, + osTypeArray, tags, tie_breaker_id, updated_at, @@ -28,7 +28,6 @@ import { commentsArray, entriesArray } from '../types'; export const exceptionListItemSchema = t.exact( t.type({ - _tags, _version: _versionOrUndefined, comments: commentsArray, created_at, @@ -41,6 +40,7 @@ export const exceptionListItemSchema = t.exact( meta: metaOrUndefined, name, namespace_type, + os_types: osTypeArray, tags, tie_breaker_id, type: 
exceptionListItemType, diff --git a/x-pack/plugins/lists/common/schemas/response/exception_list_schema.mock.ts b/x-pack/plugins/lists/common/schemas/response/exception_list_schema.mock.ts index 6df051e83b97c..7371a9d16fd4d 100644 --- a/x-pack/plugins/lists/common/schemas/response/exception_list_schema.mock.ts +++ b/x-pack/plugins/lists/common/schemas/response/exception_list_schema.mock.ts @@ -28,7 +28,6 @@ import { import { ExceptionListSchema } from './exception_list_schema'; export const getExceptionListSchemaMock = (): ExceptionListSchema => ({ - _tags: ['endpoint', 'process', 'malware', 'os:linux'], _version: _VERSION, created_at: DATE_NOW, created_by: USER, @@ -39,6 +38,7 @@ export const getExceptionListSchemaMock = (): ExceptionListSchema => ({ meta: META, name: 'Sample Endpoint Exception List', namespace_type: 'agnostic', + os_types: ['linux'], tags: ['user added string for a tag', 'malware'], tie_breaker_id: TIE_BREAKER, type: ENDPOINT_TYPE, @@ -63,13 +63,13 @@ export const getTrustedAppsListSchemaMock = (): ExceptionListSchema => { export const getExceptionResponseMockWithoutAutoGeneratedValues = (): Partial< ExceptionListSchema > => ({ - _tags: [], created_by: ELASTIC_USER, description: DESCRIPTION, immutable: IMMUTABLE, list_id: LIST_ID, name: NAME, namespace_type: 'single', + os_types: [], tags: [], type: ENDPOINT_TYPE, updated_by: ELASTIC_USER, diff --git a/x-pack/plugins/lists/common/schemas/response/exception_list_schema.ts b/x-pack/plugins/lists/common/schemas/response/exception_list_schema.ts index 6597cb20508ca..ec03467c64e5c 100644 --- a/x-pack/plugins/lists/common/schemas/response/exception_list_schema.ts +++ b/x-pack/plugins/lists/common/schemas/response/exception_list_schema.ts @@ -7,7 +7,6 @@ import * as t from 'io-ts'; import { - _tags, _versionOrUndefined, created_at, created_by, @@ -19,6 +18,7 @@ import { metaOrUndefined, name, namespace_type, + osTypeArray, tags, tie_breaker_id, updated_at, @@ -28,7 +28,6 @@ import { export const exceptionListSchema = t.exact( t.type({ - _tags, _version: _versionOrUndefined, created_at, created_by, @@ -39,6 +38,7 @@ export const exceptionListSchema = t.exact( meta: metaOrUndefined, name, namespace_type, + os_types: osTypeArray, tags, tie_breaker_id, type: exceptionListType, diff --git a/x-pack/plugins/lists/common/schemas/saved_objects/exceptions_list_so_schema.ts b/x-pack/plugins/lists/common/schemas/saved_objects/exceptions_list_so_schema.ts index f4db77f4ee057..16c43e4611edb 100644 --- a/x-pack/plugins/lists/common/schemas/saved_objects/exceptions_list_so_schema.ts +++ b/x-pack/plugins/lists/common/schemas/saved_objects/exceptions_list_so_schema.ts @@ -8,7 +8,6 @@ import * as t from 'io-ts'; import { commentsArrayOrUndefined, entriesArrayOrUndefined } from '../types'; import { - _tags, created_at, created_by, description, @@ -20,6 +19,7 @@ import { list_type, metaOrUndefined, name, + osTypeArray, tags, tie_breaker_id, updated_by, @@ -31,7 +31,6 @@ import { */ export const exceptionListSoSchema = t.exact( t.type({ - _tags, comments: commentsArrayOrUndefined, created_at, created_by, @@ -43,6 +42,7 @@ export const exceptionListSoSchema = t.exact( list_type, meta: metaOrUndefined, name, + os_types: osTypeArray, tags, tie_breaker_id, type: t.union([exceptionListType, exceptionListItemType]), diff --git a/x-pack/plugins/lists/common/shared_exports.ts b/x-pack/plugins/lists/common/shared_exports.ts index 361837bdef229..ec9358c2cb503 100644 --- a/x-pack/plugins/lists/common/shared_exports.ts +++ 
b/x-pack/plugins/lists/common/shared_exports.ts @@ -41,6 +41,8 @@ export { namespaceType, ExceptionListType, Type, + osTypeArray, + OsTypeArray, } from './schemas'; export { ENDPOINT_LIST_ID } from './constants'; diff --git a/x-pack/plugins/lists/common/shared_imports.ts b/x-pack/plugins/lists/common/shared_imports.ts index e5302b5cd5d88..9fe37465519ea 100644 --- a/x-pack/plugins/lists/common/shared_imports.ts +++ b/x-pack/plugins/lists/common/shared_imports.ts @@ -6,6 +6,7 @@ export { NonEmptyString, + DefaultArray, DefaultUuid, DefaultStringArray, DefaultVersionNumber, diff --git a/x-pack/plugins/lists/server/routes/create_endpoint_list_item_route.ts b/x-pack/plugins/lists/server/routes/create_endpoint_list_item_route.ts index 7fd07ed5fb8cd..cce4038ff48d6 100644 --- a/x-pack/plugins/lists/server/routes/create_endpoint_list_item_route.ts +++ b/x-pack/plugins/lists/server/routes/create_endpoint_list_item_route.ts @@ -37,13 +37,13 @@ export const createEndpointListItemRoute = (router: IRouter): void => { try { const { name, - _tags, tags, meta, comments, description, entries, item_id: itemId, + os_types: osTypes, type, } = request.body; const exceptionLists = getExceptionListClient(context); @@ -58,13 +58,13 @@ export const createEndpointListItemRoute = (router: IRouter): void => { }); } else { const createdList = await exceptionLists.createEndpointListItem({ - _tags, comments, description, entries, itemId, meta, name, + osTypes, tags, type, }); diff --git a/x-pack/plugins/lists/server/routes/create_exception_list_item_route.ts b/x-pack/plugins/lists/server/routes/create_exception_list_item_route.ts index e51e113239f20..afcb0f99c8a35 100644 --- a/x-pack/plugins/lists/server/routes/create_exception_list_item_route.ts +++ b/x-pack/plugins/lists/server/routes/create_exception_list_item_route.ts @@ -39,7 +39,6 @@ export const createExceptionListItemRoute = (router: IRouter): void => { const { namespace_type: namespaceType, name, - _tags, tags, meta, comments, @@ -47,6 +46,7 @@ export const createExceptionListItemRoute = (router: IRouter): void => { entries, item_id: itemId, list_id: listId, + os_types: osTypes, type, } = request.body; const exceptionLists = getExceptionListClient(context); @@ -87,7 +87,6 @@ export const createExceptionListItemRoute = (router: IRouter): void => { } } const createdList = await exceptionLists.createExceptionListItem({ - _tags, comments, description, entries, @@ -96,6 +95,7 @@ export const createExceptionListItemRoute = (router: IRouter): void => { meta, name, namespaceType, + osTypes, tags, type, }); diff --git a/x-pack/plugins/lists/server/routes/create_exception_list_route.ts b/x-pack/plugins/lists/server/routes/create_exception_list_route.ts index 08db0825e07bd..fd2ba6340009c 100644 --- a/x-pack/plugins/lists/server/routes/create_exception_list_route.ts +++ b/x-pack/plugins/lists/server/routes/create_exception_list_route.ts @@ -36,7 +36,6 @@ export const createExceptionListRoute = (router: IRouter): void => { try { const { name, - _tags, tags, meta, namespace_type: namespaceType, @@ -58,7 +57,6 @@ export const createExceptionListRoute = (router: IRouter): void => { }); } else { const createdList = await exceptionLists.createExceptionList({ - _tags, description, immutable: false, listId, diff --git a/x-pack/plugins/lists/server/routes/update_endpoint_list_item_route.ts b/x-pack/plugins/lists/server/routes/update_endpoint_list_item_route.ts index e0d6a0ffffa6b..8312f2fc87b98 100644 --- a/x-pack/plugins/lists/server/routes/update_endpoint_list_item_route.ts +++ 
b/x-pack/plugins/lists/server/routes/update_endpoint_list_item_route.ts @@ -38,9 +38,9 @@ export const updateEndpointListItemRoute = (router: IRouter): void => { description, id, name, + os_types: osTypes, meta, type, - _tags, _version, comments, entries, @@ -49,7 +49,6 @@ export const updateEndpointListItemRoute = (router: IRouter): void => { } = request.body; const exceptionLists = getExceptionListClient(context); const exceptionListItem = await exceptionLists.updateEndpointListItem({ - _tags, _version, comments, description, @@ -58,6 +57,7 @@ export const updateEndpointListItemRoute = (router: IRouter): void => { itemId, meta, name, + osTypes, tags, type, }); diff --git a/x-pack/plugins/lists/server/routes/update_exception_list_item_route.ts b/x-pack/plugins/lists/server/routes/update_exception_list_item_route.ts index 745ad0735a174..9ad563724b860 100644 --- a/x-pack/plugins/lists/server/routes/update_exception_list_item_route.ts +++ b/x-pack/plugins/lists/server/routes/update_exception_list_item_route.ts @@ -46,12 +46,12 @@ export const updateExceptionListItemRoute = (router: IRouter): void => { name, meta, type, - _tags, _version, comments, entries, item_id: itemId, namespace_type: namespaceType, + os_types: osTypes, tags, } = request.body; if (id == null && itemId == null) { @@ -62,7 +62,6 @@ export const updateExceptionListItemRoute = (router: IRouter): void => { } else { const exceptionLists = getExceptionListClient(context); const exceptionListItem = await exceptionLists.updateExceptionListItem({ - _tags, _version, comments, description, @@ -72,6 +71,7 @@ export const updateExceptionListItemRoute = (router: IRouter): void => { meta, name, namespaceType, + osTypes, tags, type, }); diff --git a/x-pack/plugins/lists/server/routes/update_exception_list_route.ts b/x-pack/plugins/lists/server/routes/update_exception_list_route.ts index 1903d0f601d1d..47008e3b78fae 100644 --- a/x-pack/plugins/lists/server/routes/update_exception_list_route.ts +++ b/x-pack/plugins/lists/server/routes/update_exception_list_route.ts @@ -35,7 +35,6 @@ export const updateExceptionListRoute = (router: IRouter): void => { const siemResponse = buildSiemResponse(response); try { const { - _tags, _version, tags, name, @@ -44,6 +43,7 @@ export const updateExceptionListRoute = (router: IRouter): void => { list_id: listId, meta, namespace_type: namespaceType, + os_types: osTypes, type, version, } = request.body; @@ -55,7 +55,6 @@ export const updateExceptionListRoute = (router: IRouter): void => { }); } else { const list = await exceptionLists.updateExceptionList({ - _tags, _version, description, id, @@ -63,6 +62,7 @@ export const updateExceptionListRoute = (router: IRouter): void => { meta, name, namespaceType, + osTypes, tags, type, version, diff --git a/x-pack/plugins/lists/server/saved_objects/exception_list.ts b/x-pack/plugins/lists/server/saved_objects/exception_list.ts index f9e408833e069..b3fd2c0eced98 100644 --- a/x-pack/plugins/lists/server/saved_objects/exception_list.ts +++ b/x-pack/plugins/lists/server/saved_objects/exception_list.ts @@ -6,6 +6,8 @@ import { SavedObjectsType } from 'kibana/server'; +import { migrations } from './migrations'; + export const exceptionListSavedObjectType = 'exception-list'; export const exceptionListAgnosticSavedObjectType = 'exception-list-agnostic'; export type SavedObjectType = 'exception-list' | 'exception-list-agnostic'; @@ -149,6 +151,9 @@ export const exceptionListItemMapping: SavedObjectsType['mappings'] = { item_id: { type: 'keyword', }, + os_types: { + type: 
'keyword',
+  },
   },
 };
@@ -163,6 +168,7 @@ const combinedMappings: SavedObjectsType['mappings'] = {
 export const exceptionListType: SavedObjectsType = {
   hidden: false,
   mappings: combinedMappings,
+  migrations,
   name: exceptionListSavedObjectType,
   namespaceType: 'single',
 };
@@ -170,6 +176,7 @@ export const exceptionListAgnosticType: SavedObjectsType = {
   hidden: false,
   mappings: combinedMappings,
+  migrations,
   name: exceptionListAgnosticSavedObjectType,
   namespaceType: 'agnostic',
 };
diff --git a/x-pack/plugins/lists/server/saved_objects/migrations.test.ts b/x-pack/plugins/lists/server/saved_objects/migrations.test.ts
new file mode 100644
index 0000000000000..cd7ef0f37d505
--- /dev/null
+++ b/x-pack/plugins/lists/server/saved_objects/migrations.test.ts
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { SavedObjectUnsanitizedDoc } from 'kibana/server';
+
+import { ENDPOINT_LIST_ID } from '../../common/constants';
+
+import { OldExceptionListSoSchema, migrations } from './migrations';
+
+describe('7.10.0 lists migrations', () => {
+  const migration = migrations['7.10.0'];
+
+  test('properly converts .text fields to .caseless', () => {
+    const doc = {
+      attributes: {
+        entries: [
+          {
+            field: 'file.path.text',
+            operator: 'included',
+            type: 'match',
+            value: 'C:\\Windows\\explorer.exe',
+          },
+          {
+            field: 'host.os.name',
+            operator: 'included',
+            type: 'match',
+            value: 'my-host',
+          },
+          {
+            entries: [
+              {
+                field: 'process.command_line.text',
+                operator: 'included',
+                type: 'match',
+                value: '/usr/bin/bash',
+              },
+              {
+                field: 'process.parent.command_line.text',
+                operator: 'included',
+                type: 'match',
+                value: '/usr/bin/bash',
+              },
+            ],
+            field: 'nested.field',
+            type: 'nested',
+          },
+        ],
+        list_id: ENDPOINT_LIST_ID,
+      },
+      id: 'abcd',
+      migrationVersion: {},
+      references: [],
+      type: 'so-type',
+      updated_at: '2020-06-09T20:18:20.349Z',
+    };
+    expect(
+      migration((doc as unknown) as SavedObjectUnsanitizedDoc<OldExceptionListSoSchema>)
+    ).toEqual({
+      attributes: {
+        entries: [
+          {
+            field: 'file.path.caseless',
+            operator: 'included',
+            type: 'match',
+            value: 'C:\\Windows\\explorer.exe',
+          },
+          {
+            field: 'host.os.name',
+            operator: 'included',
+            type: 'match',
+            value: 'my-host',
+          },
+          {
+            entries: [
+              {
+                field: 'process.command_line.caseless',
+                operator: 'included',
+                type: 'match',
+                value: '/usr/bin/bash',
+              },
+              {
+                field: 'process.parent.command_line.caseless',
+                operator: 'included',
+                type: 'match',
+                value: '/usr/bin/bash',
+              },
+            ],
+            field: 'nested.field',
+            type: 'nested',
+          },
+        ],
+        list_id: ENDPOINT_LIST_ID,
+      },
+      id: 'abcd',
+      migrationVersion: {},
+      references: [],
+      type: 'so-type',
+      updated_at: '2020-06-09T20:18:20.349Z',
+    });
+  });
+
+  test('properly copies os tags to os_types', () => {
+    const doc = {
+      attributes: {
+        _tags: ['1234', 'os:windows'],
+        comments: [],
+      },
+      id: 'abcd',
+      migrationVersion: {},
+      references: [],
+      type: 'so-type',
+      updated_at: '2020-06-09T20:18:20.349Z',
+    };
+    expect(
+      migration((doc as unknown) as SavedObjectUnsanitizedDoc<OldExceptionListSoSchema>)
+    ).toEqual({
+      attributes: {
+        _tags: ['1234', 'os:windows'],
+        comments: [],
+        os_types: ['windows'],
+      },
+      id: 'abcd',
+      migrationVersion: {},
+      references: [],
+      type: 'so-type',
+      updated_at: '2020-06-09T20:18:20.349Z',
+    });
+  });
+});
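The `.text` to `.caseless` entry rename in the migration below is applied only when `list_id` is the endpoint list or the endpoint trusted-apps list; for any other `list_id`, entries pass through unchanged while `os:`-prefixed tags are still copied into `os_types`. A minimal sketch of that behavior, using only the exports of the new `migrations.ts` (the document values here are hypothetical):

    import { SavedObjectUnsanitizedDoc } from 'kibana/server';

    import { OldExceptionListSoSchema, migrations } from './migrations';

    // A detection-list item: its list_id is neither ENDPOINT_LIST_ID nor
    // ENDPOINT_TRUSTED_APPS_LIST_ID, so its entries are left untouched,
    // but the 'os:' tag is still copied into the new os_types field.
    const detectionListDoc = {
      attributes: {
        _tags: ['detection', 'os:macos'],
        entries: [
          { field: 'file.path.text', operator: 'included', type: 'match', value: '/tmp/a' },
        ],
        list_id: 'my_detection_list',
      },
      id: 'example-id',
      references: [],
      type: 'exception-list',
      updated_at: '2020-06-09T20:18:20.349Z',
    };

    const migrated = migrations['7.10.0'](
      (detectionListDoc as unknown) as SavedObjectUnsanitizedDoc<OldExceptionListSoSchema>
    );
    // migrated.attributes.entries[0].field === 'file.path.text' (unchanged)
    // migrated.attributes.os_types === ['macos']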
diff --git a/x-pack/plugins/lists/server/saved_objects/migrations.ts b/x-pack/plugins/lists/server/saved_objects/migrations.ts
new file mode 100644
index 0000000000000..2e9792cd8eb3c
--- /dev/null
+++ b/x-pack/plugins/lists/server/saved_objects/migrations.ts
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as t from 'io-ts';
+import { SavedObjectSanitizedDoc, SavedObjectUnsanitizedDoc } from 'kibana/server';
+
+import { ENDPOINT_LIST_ID, ENDPOINT_TRUSTED_APPS_LIST_ID } from '../../common/constants';
+import {
+  EntriesArray,
+  ExceptionListSoSchema,
+  NonEmptyNestedEntriesArray,
+  OsTypeArray,
+  entriesNested,
+  entry,
+} from '../../common/schemas';
+
+const entryType = t.union([entry, entriesNested]);
+type EntryType = t.TypeOf<typeof entryType>;
+
+const migrateEntry = (entryToMigrate: EntryType): EntryType => {
+  const newEntry = entryToMigrate;
+  if (entriesNested.is(entryToMigrate) && entriesNested.is(newEntry)) {
+    newEntry.entries = entryToMigrate.entries.map((nestedEntry) =>
+      migrateEntry(nestedEntry)
+    ) as NonEmptyNestedEntriesArray;
+  }
+  newEntry.field = entryToMigrate.field.replace('.text', '.caseless');
+  return newEntry;
+};
+
+const reduceOsTypes = (acc: string[], tag: string): string[] => {
+  if (tag.startsWith('os:')) {
+    // TODO: check OS against type
+    return [...acc, tag.replace('os:', '')];
+  }
+  return [...acc];
+};
+
+export type OldExceptionListSoSchema = ExceptionListSoSchema & {
+  _tags: string[];
+};
+
+export const migrations = {
+  '7.10.0': (
+    doc: SavedObjectUnsanitizedDoc<OldExceptionListSoSchema>
+  ): SavedObjectSanitizedDoc<ExceptionListSoSchema> => ({
+    ...doc,
+    ...{
+      attributes: {
+        ...doc.attributes,
+        ...(doc.attributes.entries &&
+          [ENDPOINT_LIST_ID, ENDPOINT_TRUSTED_APPS_LIST_ID].includes(doc.attributes.list_id) && {
+            entries: (doc.attributes.entries as EntriesArray).map(migrateEntry),
+          }),
+        ...(doc.attributes._tags &&
+          doc.attributes._tags.reduce(reduceOsTypes, []).length > 0 && {
+            os_types: doc.attributes._tags.reduce(reduceOsTypes, []) as OsTypeArray,
+          }),
+      },
+    },
+    references: doc.references || [],
+  }),
+};
diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/endpoint_list_item.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/endpoint_list_item.json
index 6999441d21941..5e7dee83776bf 100644
--- a/x-pack/plugins/lists/server/scripts/exception_lists/new/endpoint_list_item.json
+++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/endpoint_list_item.json
@@ -1,10 +1,10 @@
 {
   "item_id": "simple_list_item",
-  "_tags": ["endpoint", "process", "malware", "os:linux"],
   "tags": ["user added string for a tag", "malware"],
   "type": "simple",
   "description": "This is a sample endpoint type exception",
   "name": "Sample Endpoint Exception List",
+  "os_types": ["linux"],
   "entries": [
     {
       "field": "actingProcess.file.signer",
diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list.json
index 19027ac189a47..73271514269da 100644
--- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list.json
+++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list.json
@@ -1,6 +1,5 @@
 {
   "list_id": "simple_list",
-  "_tags": ["endpoint", "process", "malware", "os:linux"],
   "tags": ["user added string for a tag", "malware"],
   "type": "detection",
"description": "This is a sample endpoint type exception", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_agnostic.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_agnostic.json index 4121b13880660..9987f5d46af1b 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_agnostic.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_agnostic.json @@ -1,6 +1,5 @@ { "list_id": "endpoint_list", - "_tags": ["endpoint", "process", "malware", "os:linux"], "tags": ["user added string for a tag", "malware"], "type": "endpoint", "description": "This is a sample agnostic endpoint type exception", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_detection.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_detection.json index 306195f4226e3..986c368bd2de3 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_detection.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_detection.json @@ -1,6 +1,5 @@ { "list_id": "detection_list", - "_tags": ["detection"], "tags": ["detection", "sample_tag"], "type": "detection", "description": "This is a sample detection type exception list", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item.json index eede855aab199..e7eed0a56cb6d 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item.json @@ -1,11 +1,11 @@ { "list_id": "simple_list", "item_id": "simple_list_item", - "_tags": ["endpoint", "process", "malware", "os:linux"], "tags": ["user added string for a tag", "malware"], "type": "simple", "description": "This is a sample endpoint type exception", "name": "Sample Endpoint Exception List", + "os_types": ["linux"], "entries": [ { "field": "actingProcess.file.signer", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_agnostic.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_agnostic.json index 9cda9c12d6b30..d57fb19955e34 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_agnostic.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_agnostic.json @@ -1,12 +1,12 @@ { "list_id": "endpoint_list", "item_id": "endpoint_list_item", - "_tags": ["endpoint", "process", "malware", "os:linux"], "tags": ["user added string for a tag", "malware"], "type": "simple", "description": "This is a sample agnostic endpoint type exception", "name": "Sample Endpoint Exception List", "namespace_type": "agnostic", + "os_types": ["linux"], "entries": [ { "field": "actingProcess.file.signer", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_auto_id.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_auto_id.json index f1281e2ea0560..9cc73577818c5 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_auto_id.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_auto_id.json @@ -1,10 +1,10 @@ { "list_id": "simple_list", - "_tags": ["endpoint", "process", "malware", "os:linux"], "tags": ["user added string for a tag", "malware"], "type": 
"simple", "description": "This is a sample endpoint type exception that has no item_id so it creates a new id each time", "name": "Sample Endpoint Exception List", + "os_types": ["linux"], "comments": [], "entries": [ { diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_detection_auto_id.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_detection_auto_id.json index 833f6c023c5d9..e65f818c1df85 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_detection_auto_id.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_detection_auto_id.json @@ -1,6 +1,5 @@ { "list_id": "detection_list", - "_tags": ["detection"], "tags": ["test_tag", "detection", "no_more_bad_guys"], "type": "simple", "description": "This is a sample detection type exception that has no item_id so it creates a new id each time", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_bad_ip_list.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_bad_ip_list.json index bab435487ec25..9a5f6e888e6e4 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_bad_ip_list.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_bad_ip_list.json @@ -1,11 +1,11 @@ { "list_id": "endpoint_list", "item_id": "endpoint_list_item_good_rock01", - "_tags": ["endpoint", "process", "malware", "os:windows"], "tags": ["user added string for a tag", "malware"], "type": "simple", "description": "Don't signal when agent.name is rock01 and source.ip is in the goodguys.txt list", "name": "Filter out good guys ip and agent.name rock01", + "os_types": ["windows"], "comments": [], "entries": [ { diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json index e0d401eff9269..d0756b990aad0 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/exception_list_item_with_list.json @@ -1,11 +1,11 @@ { "list_id": "endpoint_list", "item_id": "endpoint_list_item_lg_val_list", - "_tags": ["endpoint", "process", "malware", "os:windows"], "tags": ["user added string for a tag", "malware"], "type": "simple", "description": "This is a sample exception list item with a large value list included", "name": "Sample Endpoint Exception List Item with large value list", + "os_types": ["windows"], "comments": [], "entries": [ { diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/new/trusted_app_list_item_agnostic.json b/x-pack/plugins/lists/server/scripts/exception_lists/new/trusted_app_list_item_agnostic.json index 9f0c306a408f0..293ca14d323f7 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/new/trusted_app_list_item_agnostic.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/new/trusted_app_list_item_agnostic.json @@ -1,12 +1,12 @@ { "list_id": "endpoint_trusted_apps", "item_id": "endpoint_trusted_apps_item", - "_tags": ["endpoint", "os:linux", "os:windows", "os:macos", "trusted-app"], "tags": ["user added string for a tag", "malware"], "type": "simple", "description": "This is a sample agnostic endpoint trusted app entry", "name": "Sample Endpoint Trusted App Entry", "namespace_type": "agnostic", + 
"os_types": ["linux", "windows", "macos"], "entries": [ { "field": "actingProcess.file.signer", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update.json b/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update.json index 8d07b29d7b428..15a6f495b7a8f 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update.json @@ -1,8 +1,8 @@ { "list_id": "simple_list", - "_tags": ["endpoint", "process", "malware", "os:linux"], "tags": ["user added string for a tag", "malware"], "type": "endpoint", + "os_types": ["linux"], "description": "Different description", "name": "Sample Endpoint Exception List" } diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_agnostic.json b/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_agnostic.json index 90d5e0846e53a..fe29ca80c632e 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_agnostic.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_agnostic.json @@ -1,11 +1,11 @@ { "item_id": "endpoint_list_item", - "_tags": ["endpoint", "process", "malware", "os:windows"], "tags": ["user added string for a tag", "malware"], "type": "simple", "description": "This is a sample agnostic change here this list", "name": "Sample Endpoint Exception List update change", "namespace_type": "agnostic", + "os_types": ["windows"], "entries": [ { "field": "event.category", diff --git a/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_item.json b/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_item.json index 81db909277595..d55f121253406 100644 --- a/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_item.json +++ b/x-pack/plugins/lists/server/scripts/exception_lists/updates/simple_update_item.json @@ -1,5 +1,4 @@ { - "_tags": ["detection"], "comments": [], "description": "Test comments - exception list item", "entries": [ diff --git a/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts b/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts index 2e9bb1325632e..fb2b637657bb6 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_list.ts @@ -35,7 +35,6 @@ export const createEndpointList = async ({ const savedObject = await savedObjectsClient.create( savedObjectType, { - _tags: [], comments: undefined, created_at: dateNow, created_by: user, @@ -47,6 +46,7 @@ export const createEndpointList = async ({ list_type: 'list', meta: undefined, name: ENDPOINT_LIST_NAME, + os_types: [], tags: [], tie_breaker_id: tieBreaker ?? 
uuid.v4(), type: 'endpoint', diff --git a/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_trusted_apps_list.ts b/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_trusted_apps_list.ts index c782cdd302666..d9eedb0af4e77 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_trusted_apps_list.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/create_endpoint_trusted_apps_list.ts @@ -43,7 +43,6 @@ export const createEndpointTrustedAppsList = async ({ const savedObject = await savedObjectsClient.create( savedObjectType, { - _tags: [], comments: undefined, created_at: dateNow, created_by: user, @@ -55,6 +54,7 @@ export const createEndpointTrustedAppsList = async ({ list_type: 'list', meta: undefined, name: ENDPOINT_TRUSTED_APPS_LIST_NAME, + os_types: [], tags: [], tie_breaker_id: tieBreaker ?? uuid.v4(), type: 'endpoint', diff --git a/x-pack/plugins/lists/server/services/exception_lists/create_exception_list.ts b/x-pack/plugins/lists/server/services/exception_lists/create_exception_list.ts index c8d709ca340ad..91a0506ad06e3 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/create_exception_list.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/create_exception_list.ts @@ -19,13 +19,11 @@ import { NamespaceType, Tags, Version, - _Tags, } from '../../../common/schemas'; import { getSavedObjectType, transformSavedObjectToExceptionList } from './utils'; interface CreateExceptionListOptions { - _tags: _Tags; listId: ListId; savedObjectsClient: SavedObjectsClientContract; namespaceType: NamespaceType; @@ -41,7 +39,6 @@ interface CreateExceptionListOptions { } export const createExceptionList = async ({ - _tags, listId, immutable, savedObjectsClient, @@ -58,7 +55,6 @@ export const createExceptionList = async ({ const savedObjectType = getSavedObjectType({ namespaceType }); const dateNow = new Date().toISOString(); const savedObject = await savedObjectsClient.create(savedObjectType, { - _tags, comments: undefined, created_at: dateNow, created_by: user, @@ -70,6 +66,7 @@ export const createExceptionList = async ({ list_type: 'list', meta, name, + os_types: [], tags, tie_breaker_id: tieBreaker ?? 
uuid.v4(), type, diff --git a/x-pack/plugins/lists/server/services/exception_lists/create_exception_list_item.ts b/x-pack/plugins/lists/server/services/exception_lists/create_exception_list_item.ts index 47c21735b45f4..9f331362cdd44 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/create_exception_list_item.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/create_exception_list_item.ts @@ -19,8 +19,8 @@ import { MetaOrUndefined, Name, NamespaceType, + OsTypeArray, Tags, - _Tags, } from '../../../common/schemas'; import { @@ -30,7 +30,6 @@ import { } from './utils'; interface CreateExceptionListItemOptions { - _tags: _Tags; comments: CreateCommentsArray; listId: ListId; itemId: ItemId; @@ -44,10 +43,10 @@ interface CreateExceptionListItemOptions { tags: Tags; tieBreaker?: string; type: ExceptionListItemType; + osTypes: OsTypeArray; } export const createExceptionListItem = async ({ - _tags, comments, entries, itemId, @@ -55,6 +54,7 @@ export const createExceptionListItem = async ({ savedObjectsClient, namespaceType, name, + osTypes, description, meta, user, @@ -69,7 +69,6 @@ export const createExceptionListItem = async ({ user, }); const savedObject = await savedObjectsClient.create(savedObjectType, { - _tags, comments: transformedComments, created_at: dateNow, created_by: user, @@ -81,6 +80,7 @@ export const createExceptionListItem = async ({ list_type: 'item', meta, name, + os_types: osTypes as OsTypeArray, tags, tie_breaker_id: tieBreaker ?? uuid.v4(), type, diff --git a/x-pack/plugins/lists/server/services/exception_lists/exception_list_client.ts b/x-pack/plugins/lists/server/services/exception_lists/exception_list_client.ts index 747458175e3b8..9747c58d1cd0f 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/exception_list_client.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/exception_list_client.ts @@ -109,20 +109,19 @@ export class ExceptionListClient { * being there and existing before the item is inserted into the agnostic endpoint list. */ public createEndpointListItem = async ({ - _tags, comments, description, entries, itemId, meta, name, + osTypes, tags, type, }: CreateEndpointListItemOptions): Promise => { const { savedObjectsClient, user } = this; await this.createEndpointList(); return createExceptionListItem({ - _tags, comments, description, entries, @@ -131,6 +130,7 @@ export class ExceptionListClient { meta, name, namespaceType: 'agnostic', + osTypes, savedObjectsClient, tags, type, @@ -145,7 +145,6 @@ export class ExceptionListClient { * return of null but at least the list exists again. 
*/ public updateEndpointListItem = async ({ - _tags, _version, comments, description, @@ -154,13 +153,13 @@ export class ExceptionListClient { itemId, meta, name, + osTypes, tags, type, }: UpdateEndpointListItemOptions): Promise => { const { savedObjectsClient, user } = this; await this.createEndpointList(); return updateExceptionListItem({ - _tags, _version, comments, description, @@ -170,6 +169,7 @@ export class ExceptionListClient { meta, name, namespaceType: 'agnostic', + osTypes, savedObjectsClient, tags, type, @@ -189,7 +189,6 @@ export class ExceptionListClient { }; public createExceptionList = async ({ - _tags, description, immutable, listId, @@ -202,7 +201,6 @@ export class ExceptionListClient { }: CreateExceptionListOptions): Promise => { const { savedObjectsClient, user } = this; return createExceptionList({ - _tags, description, immutable, listId, @@ -218,7 +216,6 @@ export class ExceptionListClient { }; public updateExceptionList = async ({ - _tags, _version, id, description, @@ -226,13 +223,13 @@ export class ExceptionListClient { meta, name, namespaceType, + osTypes, tags, type, version, }: UpdateExceptionListOptions): Promise => { const { savedObjectsClient, user } = this; return updateExceptionList({ - _tags, _version, description, id, @@ -240,6 +237,7 @@ export class ExceptionListClient { meta, name, namespaceType, + osTypes, savedObjectsClient, tags, type, @@ -263,7 +261,6 @@ export class ExceptionListClient { }; public createExceptionListItem = async ({ - _tags, comments, description, entries, @@ -272,12 +269,12 @@ export class ExceptionListClient { meta, name, namespaceType, + osTypes, tags, type, }: CreateExceptionListItemOptions): Promise => { const { savedObjectsClient, user } = this; return createExceptionListItem({ - _tags, comments, description, entries, @@ -286,6 +283,7 @@ export class ExceptionListClient { meta, name, namespaceType, + osTypes, savedObjectsClient, tags, type, @@ -294,7 +292,6 @@ export class ExceptionListClient { }; public updateExceptionListItem = async ({ - _tags, _version, comments, description, @@ -304,12 +301,12 @@ export class ExceptionListClient { meta, name, namespaceType, + osTypes, tags, type, }: UpdateExceptionListItemOptions): Promise => { const { savedObjectsClient, user } = this; return updateExceptionListItem({ - _tags, _version, comments, description, @@ -319,6 +316,7 @@ export class ExceptionListClient { meta, name, namespaceType, + osTypes, savedObjectsClient, tags, type, diff --git a/x-pack/plugins/lists/server/services/exception_lists/exception_list_client_types.ts b/x-pack/plugins/lists/server/services/exception_lists/exception_list_client_types.ts index 963716b55ea77..1fef2da5d975e 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/exception_list_client_types.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/exception_list_client_types.ts @@ -30,6 +30,7 @@ import { Name, NameOrUndefined, NamespaceType, + OsTypeArray, PageOrUndefined, PerPageOrUndefined, SortFieldOrUndefined, @@ -39,8 +40,6 @@ import { UpdateCommentsArray, Version, VersionOrUndefined, - _Tags, - _TagsOrUndefined, _VersionOrUndefined, } from '../../../common/schemas'; @@ -56,7 +55,6 @@ export interface GetExceptionListOptions { } export interface CreateExceptionListOptions { - _tags: _Tags; listId: ListId; namespaceType: NamespaceType; name: Name; @@ -69,12 +67,12 @@ export interface CreateExceptionListOptions { } export interface UpdateExceptionListOptions { - _tags: _TagsOrUndefined; _version: _VersionOrUndefined; id: 
IdOrUndefined; listId: ListIdOrUndefined; namespaceType: NamespaceType; name: NameOrUndefined; + osTypes: OsTypeArray; description: DescriptionOrUndefined; meta: MetaOrUndefined; tags: TagsOrUndefined; @@ -116,13 +114,13 @@ export interface GetEndpointListItemOptions { } export interface CreateExceptionListItemOptions { - _tags: _Tags; comments: CreateCommentsArray; entries: EntriesArray; itemId: ItemId; listId: ListId; namespaceType: NamespaceType; name: Name; + osTypes: OsTypeArray; description: Description; meta: MetaOrUndefined; tags: Tags; @@ -130,19 +128,18 @@ export interface CreateExceptionListItemOptions { } export interface CreateEndpointListItemOptions { - _tags: _Tags; comments: CreateCommentsArray; entries: EntriesArray; itemId: ItemId; name: Name; description: Description; meta: MetaOrUndefined; + osTypes: OsTypeArray; tags: Tags; type: ExceptionListItemType; } export interface UpdateExceptionListItemOptions { - _tags: _TagsOrUndefined; _version: _VersionOrUndefined; comments: UpdateCommentsArray; entries: EntriesArray; @@ -150,6 +147,7 @@ export interface UpdateExceptionListItemOptions { itemId: ItemIdOrUndefined; namespaceType: NamespaceType; name: NameOrUndefined; + osTypes: OsTypeArray; description: DescriptionOrUndefined; meta: MetaOrUndefined; tags: TagsOrUndefined; @@ -157,13 +155,13 @@ export interface UpdateExceptionListItemOptions { } export interface UpdateEndpointListItemOptions { - _tags: _TagsOrUndefined; _version: _VersionOrUndefined; comments: UpdateCommentsArray; entries: EntriesArray; id: IdOrUndefined; itemId: ItemIdOrUndefined; name: NameOrUndefined; + osTypes: OsTypeArray; description: DescriptionOrUndefined; meta: MetaOrUndefined; tags: TagsOrUndefined; diff --git a/x-pack/plugins/lists/server/services/exception_lists/update_exception_list.ts b/x-pack/plugins/lists/server/services/exception_lists/update_exception_list.ts index c26ff1bca4484..a9a666672d7bb 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/update_exception_list.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/update_exception_list.ts @@ -16,9 +16,9 @@ import { MetaOrUndefined, NameOrUndefined, NamespaceType, + OsTypeArray, TagsOrUndefined, VersionOrUndefined, - _TagsOrUndefined, _VersionOrUndefined, } from '../../../common/schemas'; @@ -27,12 +27,12 @@ import { getExceptionList } from './get_exception_list'; interface UpdateExceptionListOptions { id: IdOrUndefined; - _tags: _TagsOrUndefined; _version: _VersionOrUndefined; name: NameOrUndefined; description: DescriptionOrUndefined; savedObjectsClient: SavedObjectsClientContract; namespaceType: NamespaceType; + osTypes: OsTypeArray; listId: ListIdOrUndefined; meta: MetaOrUndefined; user: string; @@ -43,7 +43,6 @@ interface UpdateExceptionListOptions { } export const updateExceptionList = async ({ - _tags, _version, id, savedObjectsClient, @@ -67,7 +66,6 @@ export const updateExceptionList = async ({ savedObjectType, exceptionList.id, { - _tags, description, meta, name, diff --git a/x-pack/plugins/lists/server/services/exception_lists/update_exception_list_item.ts b/x-pack/plugins/lists/server/services/exception_lists/update_exception_list_item.ts index ccb74e8796705..9c3399b7509a5 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/update_exception_list_item.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/update_exception_list_item.ts @@ -17,9 +17,9 @@ import { MetaOrUndefined, NameOrUndefined, NamespaceType, + OsTypeArray, TagsOrUndefined, UpdateCommentsArrayOrUndefined, - 
_TagsOrUndefined, _VersionOrUndefined, } from '../../../common/schemas'; @@ -33,13 +33,13 @@ import { getExceptionListItem } from './get_exception_list_item'; interface UpdateExceptionListItemOptions { id: IdOrUndefined; comments: UpdateCommentsArrayOrUndefined; - _tags: _TagsOrUndefined; _version: _VersionOrUndefined; name: NameOrUndefined; description: DescriptionOrUndefined; entries: EntriesArray; savedObjectsClient: SavedObjectsClientContract; namespaceType: NamespaceType; + osTypes: OsTypeArray; itemId: ItemIdOrUndefined; meta: MetaOrUndefined; user: string; @@ -49,7 +49,6 @@ interface UpdateExceptionListItemOptions { } export const updateExceptionListItem = async ({ - _tags, _version, comments, entries, @@ -57,6 +56,7 @@ export const updateExceptionListItem = async ({ savedObjectsClient, namespaceType, name, + osTypes, description, itemId, meta, @@ -83,12 +83,12 @@ export const updateExceptionListItem = async ({ savedObjectType, exceptionListItem.id, { - _tags, comments: transformedComments, description, entries, meta, name, + os_types: osTypes, tags, type, updated_by: user, diff --git a/x-pack/plugins/lists/server/services/exception_lists/utils.ts b/x-pack/plugins/lists/server/services/exception_lists/utils.ts index 2989a09b0ce00..6a7bd249bf62a 100644 --- a/x-pack/plugins/lists/server/services/exception_lists/utils.ts +++ b/x-pack/plugins/lists/server/services/exception_lists/utils.ts @@ -71,7 +71,6 @@ export const transformSavedObjectToExceptionList = ({ version: _version, attributes: { /* eslint-disable @typescript-eslint/naming-convention */ - _tags, created_at, created_by, description, @@ -79,6 +78,7 @@ export const transformSavedObjectToExceptionList = ({ list_id, meta, name, + os_types, tags, tie_breaker_id, type, @@ -93,7 +93,6 @@ export const transformSavedObjectToExceptionList = ({ // TODO: Change this to do a decode and throw if the saved object is not as expected. // TODO: Do a throw if after the decode this is not the correct "list_type: list" return { - _tags, _version, created_at, created_by, @@ -104,6 +103,7 @@ export const transformSavedObjectToExceptionList = ({ meta, name, namespace_type: getExceptionListType({ savedObjectType: savedObject.type }), + os_types, tags, tie_breaker_id, type: exceptionListType.is(type) ? type : 'detection', @@ -124,11 +124,11 @@ export const transformSavedObjectUpdateToExceptionList = ({ const { version: _version, attributes: { - _tags, description, immutable, meta, name, + os_types: osTypes, tags, type, updated_by: updatedBy, @@ -141,7 +141,6 @@ export const transformSavedObjectUpdateToExceptionList = ({ // TODO: Change this to do a decode and throw if the saved object is not as expected. // TODO: Do a throw if after the decode this is not the correct "list_type: list" return { - _tags: _tags ?? exceptionList._tags, _version, created_at: exceptionList.created_at, created_by: exceptionList.created_by, @@ -152,6 +151,7 @@ export const transformSavedObjectUpdateToExceptionList = ({ meta: meta ?? exceptionList.meta, name: name ?? exceptionList.name, namespace_type: getExceptionListType({ savedObjectType: savedObject.type }), + os_types: osTypes ?? exceptionList.os_types, tags: tags ?? exceptionList.tags, tie_breaker_id: exceptionList.tie_breaker_id, type: exceptionListType.is(type) ? 
type : exceptionList.type, @@ -171,7 +171,6 @@ export const transformSavedObjectToExceptionListItem = ({ version: _version, attributes: { /* eslint-disable @typescript-eslint/naming-convention */ - _tags, comments, created_at, created_by, @@ -181,6 +180,7 @@ export const transformSavedObjectToExceptionListItem = ({ list_id, meta, name, + os_types, tags, tie_breaker_id, type, @@ -194,7 +194,6 @@ export const transformSavedObjectToExceptionListItem = ({ // TODO: Do a throw if after the decode this is not the correct "list_type: item" // TODO: Do a throw if item_id or entries is not defined. return { - _tags, _version, comments: comments ?? [], created_at, @@ -207,6 +206,7 @@ export const transformSavedObjectToExceptionListItem = ({ meta, name, namespace_type: getExceptionListType({ savedObjectType: savedObject.type }), + os_types, tags, tie_breaker_id, type: exceptionListItemType.is(type) ? type : 'simple', @@ -226,12 +226,12 @@ export const transformSavedObjectUpdateToExceptionListItem = ({ const { version: _version, attributes: { - _tags, comments, description, entries, meta, name, + os_types: osTypes, tags, type, updated_by: updatedBy, @@ -245,7 +245,6 @@ export const transformSavedObjectUpdateToExceptionListItem = ({ // TODO: Update exception list and item types (perhaps separating out) so as to avoid // defaulting return { - _tags: _tags ?? exceptionListItem._tags, _version, comments: comments ?? exceptionListItem.comments, created_at: exceptionListItem.created_at, @@ -258,6 +257,7 @@ export const transformSavedObjectUpdateToExceptionListItem = ({ meta: meta ?? exceptionListItem.meta, name: name ?? exceptionListItem.name, namespace_type: getExceptionListType({ savedObjectType: savedObject.type }), + os_types: osTypes ?? exceptionListItem.os_types, tags: tags ?? exceptionListItem.tags, tie_breaker_id: exceptionListItem.tie_breaker_id, type: exceptionListItemType.is(type) ? type : exceptionListItem.type, diff --git a/x-pack/plugins/maps/common/constants.ts b/x-pack/plugins/maps/common/constants.ts index be891b6e59608..469a4023434a8 100644 --- a/x-pack/plugins/maps/common/constants.ts +++ b/x-pack/plugins/maps/common/constants.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; import { FeatureCollection } from 'geojson'; + export const EMS_APP_NAME = 'kibana'; export const EMS_CATALOGUE_PATH = 'ems/catalogue'; diff --git a/x-pack/plugins/maps/public/actions/layer_actions.test.ts b/x-pack/plugins/maps/public/actions/layer_actions.test.ts new file mode 100644 index 0000000000000..09a22dca271d7 --- /dev/null +++ b/x-pack/plugins/maps/public/actions/layer_actions.test.ts @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { addLayer } from './layer_actions'; +import { LayerDescriptor } from '../../common/descriptor_types'; +import { LICENSED_FEATURES } from '../licensed_features'; + +const getStoreMock = jest.fn(); +const dispatchMock = jest.fn(); + +describe('layer_actions', () => { + afterEach(() => { + jest.resetAllMocks(); + }); + + describe('addLayer', () => { + const notifyLicensedFeatureUsageMock = jest.fn(); + + beforeEach(() => { + // eslint-disable-next-line @typescript-eslint/no-var-requires + require('../licensed_features').notifyLicensedFeatureUsage = (feature: LICENSED_FEATURES) => { + notifyLicensedFeatureUsageMock(feature); + }; + + // eslint-disable-next-line @typescript-eslint/no-var-requires + require('../selectors/map_selectors').getMapReady = () => { + return true; + }; + + // eslint-disable-next-line @typescript-eslint/no-var-requires + require('../selectors/map_selectors').createLayerInstance = () => { + return { + getLicensedFeatures() { + return [LICENSED_FEATURES.GEO_SHAPE_AGGS_GEO_TILE]; + }, + }; + }; + }); + + it('should register feature-use', async () => { + const action = addLayer(({} as unknown) as LayerDescriptor); + await action(dispatchMock, getStoreMock); + expect(notifyLicensedFeatureUsageMock).toHaveBeenCalledWith( + LICENSED_FEATURES.GEO_SHAPE_AGGS_GEO_TILE + ); + }); + }); +}); diff --git a/x-pack/plugins/maps/public/actions/layer_actions.ts b/x-pack/plugins/maps/public/actions/layer_actions.ts index 675bb14722889..19c9adfadd45a 100644 --- a/x-pack/plugins/maps/public/actions/layer_actions.ts +++ b/x-pack/plugins/maps/public/actions/layer_actions.ts @@ -14,6 +14,7 @@ import { getSelectedLayerId, getMapReady, getMapColors, + createLayerInstance, } from '../selectors/map_selectors'; import { FLYOUT_STATE } from '../reducers/ui'; import { cancelRequest } from '../reducers/non_serializable_instances'; @@ -42,6 +43,7 @@ import { ILayer } from '../classes/layers/layer'; import { IVectorLayer } from '../classes/layers/vector_layer/vector_layer'; import { LAYER_STYLE_TYPE, LAYER_TYPE } from '../../common/constants'; import { IVectorStyle } from '../classes/styles/vector/vector_style'; +import { notifyLicensedFeatureUsage } from '../licensed_features'; export function trackCurrentLayerState(layerId: string) { return { @@ -108,7 +110,7 @@ export function cloneLayer(layerId: string) { } export function addLayer(layerDescriptor: LayerDescriptor) { - return (dispatch: Dispatch, getState: () => MapStoreState) => { + return async (dispatch: Dispatch, getState: () => MapStoreState) => { const isMapReady = getMapReady(getState()); if (!isMapReady) { dispatch({ @@ -123,6 +125,10 @@ export function addLayer(layerDescriptor: LayerDescriptor) { layer: layerDescriptor, }); dispatch(syncDataForLayerId(layerDescriptor.id)); + + const layer = createLayerInstance(layerDescriptor); + const features = await layer.getLicensedFeatures(); + features.forEach(notifyLicensedFeatureUsage); }; } diff --git a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts index 9b6a67ac28ad0..65a76f0c54ffb 100644 --- a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts +++ b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts @@ -38,6 +38,7 @@ import { VectorLayerDescriptor, } from '../../../../common/descriptor_types'; import { IVectorSource } from '../../sources/vector_source'; +import { LICENSED_FEATURES } 
from '../../../licensed_features'; const ACTIVE_COUNT_DATA_ID = 'ACTIVE_COUNT_DATA_ID'; @@ -327,4 +328,11 @@ export class BlendedVectorLayer extends VectorLayer implements IVectorLayer { super._syncData(syncContext, activeSource, activeStyle); } + + async getLicensedFeatures(): Promise { + return [ + ...(await this._clusterSource.getLicensedFeatures()), + ...(await this._documentSource.getLicensedFeatures()), + ]; + } } diff --git a/x-pack/plugins/maps/public/classes/layers/layer.tsx b/x-pack/plugins/maps/public/classes/layers/layer.tsx index d6bd5180375ce..d7fd5d34a9dd0 100644 --- a/x-pack/plugins/maps/public/classes/layers/layer.tsx +++ b/x-pack/plugins/maps/public/classes/layers/layer.tsx @@ -34,6 +34,7 @@ import { Attribution, ImmutableSourceProperty, ISource, SourceEditorArgs } from import { DataRequestContext } from '../../actions'; import { IStyle } from '../styles/style'; import { getJoinAggKey } from '../../../common/get_agg_key'; +import { LICENSED_FEATURES } from '../../licensed_features'; export interface ILayer { getBounds(dataRequestContext: DataRequestContext): Promise; @@ -91,6 +92,7 @@ export interface ILayer { showJoinEditor(): boolean; getJoinsDisabledReason(): string | null; isFittable(): Promise; + getLicensedFeatures(): Promise; } export type Footnote = { icon: ReactElement; @@ -538,4 +540,8 @@ export class AbstractLayer implements ILayer { supportsLabelsOnTop(): boolean { return false; } + + async getLicensedFeatures(): Promise { + return []; + } } diff --git a/x-pack/plugins/maps/public/classes/layers/vector_layer/vector_layer.tsx b/x-pack/plugins/maps/public/classes/layers/vector_layer/vector_layer.tsx index a2532d4e7b10e..c44ebcf969f7c 100644 --- a/x-pack/plugins/maps/public/classes/layers/vector_layer/vector_layer.tsx +++ b/x-pack/plugins/maps/public/classes/layers/vector_layer/vector_layer.tsx @@ -1090,4 +1090,8 @@ export class VectorLayer extends AbstractLayer { }); return targetFeature ? targetFeature : null; } + + async getLicensedFeatures() { + return await this._source.getLicensedFeatures(); + } } diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js index 89258f04612fd..181af6b17b7dd 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js @@ -24,12 +24,14 @@ import { MVT_GETGRIDTILE_API_PATH, GEOTILE_GRID_AGG_NAME, GEOCENTROID_AGG_NAME, + ES_GEO_FIELD_TYPE, } from '../../../../common/constants'; import { i18n } from '@kbn/i18n'; import { getDataSourceLabel } from '../../../../common/i18n_getters'; import { AbstractESAggSource, DEFAULT_METRIC } from '../es_agg_source'; import { DataRequestAbortError } from '../../util/data_request'; import { registerSource } from '../source_registry'; +import { LICENSED_FEATURES } from '../../../licensed_features'; import rison from 'rison-node'; import { getHttp } from '../../../kibana_services'; @@ -399,6 +401,13 @@ export class ESGeoGridSource extends AbstractESAggSource { return [VECTOR_SHAPE_TYPE.POINT]; } + + async getLicensedFeatures() { + const geoField = await this._getGeoField(); + return geoField.type === ES_GEO_FIELD_TYPE.GEO_SHAPE + ? 
[LICENSED_FEATURES.GEO_SHAPE_AGGS_GEO_TILE] + : []; + } } registerSource({ diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.test.ts b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.test.ts index 06df68283c434..3b1cf3293c0d3 100644 --- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.test.ts +++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.test.ts @@ -4,10 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ import { MapExtent, VectorSourceRequestMeta } from '../../../../common/descriptor_types'; - -jest.mock('../../../kibana_services'); - -import { getIndexPatternService, getSearchService, getHttp } from '../../../kibana_services'; +import { getHttp, getIndexPatternService, getSearchService } from '../../../kibana_services'; import { ESGeoGridSource } from './es_geo_grid_source'; import { ES_GEO_FIELD_TYPE, @@ -16,6 +13,9 @@ import { SOURCE_TYPES, } from '../../../../common/constants'; import { SearchSource } from 'src/plugins/data/public'; +import { LICENSED_FEATURES } from '../../../licensed_features'; + +jest.mock('../../../kibana_services'); export class MockSearchSource { setField = jest.fn(); @@ -27,6 +27,8 @@ export class MockSearchSource { describe('ESGeoGridSource', () => { const geoFieldName = 'bar'; + + let esGeoFieldType = ES_GEO_FIELD_TYPE.GEO_POINT; const mockIndexPatternService = { get() { return { @@ -34,7 +36,7 @@ describe('ESGeoGridSource', () => { getByName() { return { name: geoFieldName, - type: ES_GEO_FIELD_TYPE.GEO_POINT, + type: esGeoFieldType, }; }, }, @@ -127,6 +129,11 @@ describe('ESGeoGridSource', () => { }); }); + afterEach(() => { + esGeoFieldType = ES_GEO_FIELD_TYPE.GEO_POINT; + jest.resetAllMocks(); + }); + const extent: MapExtent = { minLon: -160, minLat: -80, @@ -271,4 +278,17 @@ describe('ESGeoGridSource', () => { ); }); }); + + describe('Gold+ usage', () => { + it('Should have none for points', async () => { + expect(await geogridSource.getLicensedFeatures()).toEqual([]); + }); + + it('Should have shape-aggs for geo_shape', async () => { + esGeoFieldType = ES_GEO_FIELD_TYPE.GEO_SHAPE; + expect(await geogridSource.getLicensedFeatures()).toEqual([ + LICENSED_FEATURES.GEO_SHAPE_AGGS_GEO_TILE, + ]); + }); + }); }); diff --git a/x-pack/plugins/maps/public/classes/sources/source.ts b/x-pack/plugins/maps/public/classes/sources/source.ts index 946381817b8fc..c4fb5178c0b56 100644 --- a/x-pack/plugins/maps/public/classes/sources/source.ts +++ b/x-pack/plugins/maps/public/classes/sources/source.ts @@ -15,6 +15,7 @@ import { IField } from '../fields/field'; import { FieldFormatter, MAX_ZOOM, MIN_ZOOM } from '../../../common/constants'; import { AbstractSourceDescriptor } from '../../../common/descriptor_types'; import { OnSourceChangeArgs } from '../../connected_components/layer_panel/view'; +import { LICENSED_FEATURES } from '../../licensed_features'; export type SourceEditorArgs = { onChange: (...args: OnSourceChangeArgs[]) => void; @@ -66,6 +67,7 @@ export interface ISource { getValueSuggestions(field: IField, query: string): Promise; getMinZoom(): number; getMaxZoom(): number; + getLicensedFeatures(): Promise; } export class AbstractSource implements ISource { @@ -188,4 +190,8 @@ export class AbstractSource implements ISource { getMaxZoom() { return MAX_ZOOM; } + + async getLicensedFeatures(): Promise { + return []; + } } diff --git 
a/x-pack/plugins/maps/public/classes/styles/vector/properties/dynamic_style_property.tsx b/x-pack/plugins/maps/public/classes/styles/vector/properties/dynamic_style_property.tsx index 2bc819daeea90..98b58def905eb 100644 --- a/x-pack/plugins/maps/public/classes/styles/vector/properties/dynamic_style_property.tsx +++ b/x-pack/plugins/maps/public/classes/styles/vector/properties/dynamic_style_property.tsx @@ -6,7 +6,8 @@ import _ from 'lodash'; import React from 'react'; -import { Feature } from 'geojson'; +import { Feature, FeatureCollection } from 'geojson'; +import { FeatureIdentifier, Map as MbMap } from 'mapbox-gl'; import { AbstractStyleProperty, IStyleProperty } from './style_property'; import { DEFAULT_SIGMA } from '../vector_style_defaults'; import { @@ -44,20 +45,14 @@ export interface IDynamicStyleProperty extends IStyleProperty { isOrdinal(): boolean; supportsFieldMeta(): boolean; getFieldMetaRequest(): Promise; - supportsMbFeatureState(): boolean; - getMbLookupFunction(): MB_LOOKUP_FUNCTION; pluckOrdinalStyleMetaFromFeatures(features: Feature[]): RangeFieldMeta | null; pluckCategoricalStyleMetaFromFeatures(features: Feature[]): CategoryFieldMeta | null; getValueSuggestions(query: string): Promise; - - // Returns the name that should be used for accessing the data from the mb-style rule - // Depending on - // - whether the field is used for labeling, icon-orientation, or other properties (color, size, ...), `feature-state` and or `get` is used - // - whether the field was run through a field-formatter, a new dynamic field is created with the formatted-value - // The combination of both will inform what field-name (e.g. the "raw" field name from the properties, the "computed field-name" for an on-the-fly created property (e.g. for feature-state or field-formatting). - // todo: There is an existing limitation to .mvt backed sources, where the field-formatters are not applied. Here, the raw-data needs to be accessed. - getMbPropertyName(): string; - getMbPropertyValue(value: RawValue): RawValue; + enrichGeoJsonAndMbFeatureState( + featureCollection: FeatureCollection, + mbMap: MbMap, + mbSourceId: string + ): boolean; } export class DynamicStyleProperty @@ -356,6 +351,12 @@ export class DynamicStyleProperty ); } + // Returns the name that should be used for accessing the data from the mb-style rule + // Depending on + // - whether the field is used for labeling, icon-orientation, or other properties (color, size, ...), `feature-state` and or `get` is used + // - whether the field was run through a field-formatter, a new dynamic field is created with the formatted-value + // The combination of both will inform what field-name (e.g. the "raw" field name from the properties, the "computed field-name" for an on-the-fly created property (e.g. for feature-state or field-formatting). + // todo: There is an existing limitation to .mvt backed sources, where the field-formatters are not applied. Here, the raw-data needs to be accessed. getMbPropertyName() { if (!this._field) { return ''; @@ -385,6 +386,35 @@ export class DynamicStyleProperty // Calling `isOrdinal` would be equivalent. return this.supportsMbFeatureState() ? 
getNumericalMbFeatureStateValue(rawValue) : rawValue; } + + enrichGeoJsonAndMbFeatureState( + featureCollection: FeatureCollection, + mbMap: MbMap, + mbSourceId: string + ): boolean { + const supportsFeatureState = this.supportsMbFeatureState(); + const featureIdentifier: FeatureIdentifier = { + source: mbSourceId, + id: undefined, + }; + const featureState: Record = {}; + const targetMbName = this.getMbPropertyName(); + for (let i = 0; i < featureCollection.features.length; i++) { + const feature = featureCollection.features[i]; + const rawValue = feature.properties ? feature.properties[this.getFieldName()] : undefined; + const targetMbValue = this.getMbPropertyValue(rawValue); + if (supportsFeatureState) { + featureState[targetMbName] = targetMbValue; // the same value will be potentially overridden multiple times, if the name remains identical + featureIdentifier.id = feature.id; + mbMap.setFeatureState(featureIdentifier, featureState); + } else { + if (feature.properties) { + feature.properties[targetMbName] = targetMbValue; + } + } + } + return supportsFeatureState; + } } export function getNumericalMbFeatureStateValue(value: RawValue) { diff --git a/x-pack/plugins/maps/public/classes/styles/vector/vector_style.tsx b/x-pack/plugins/maps/public/classes/styles/vector/vector_style.tsx index 5d0d9712ef988..acb158636e0b3 100644 --- a/x-pack/plugins/maps/public/classes/styles/vector/vector_style.tsx +++ b/x-pack/plugins/maps/public/classes/styles/vector/vector_style.tsx @@ -641,7 +641,7 @@ export class VectorStyle implements IVectorStyle { featureCollection: FeatureCollection, mbMap: MbMap, mbSourceId: string - ) { + ): boolean { if (!featureCollection) { return false; } @@ -651,40 +651,24 @@ export class VectorStyle implements IVectorStyle { return false; } - const tmpFeatureIdentifier: FeatureIdentifier = { - source: '', - id: undefined, - }; - const tmpFeatureState: any = {}; - - for (let i = 0; i < featureCollection.features.length; i++) { - const feature = featureCollection.features[i]; - - for (let j = 0; j < dynamicStyleProps.length; j++) { - const dynamicStyleProp = dynamicStyleProps[j]; - const targetMbName = dynamicStyleProp.getMbPropertyName(); - const rawValue = feature.properties - ? feature.properties[dynamicStyleProp.getFieldName()] - : undefined; - const targetMbValue = dynamicStyleProp.getMbPropertyValue(rawValue); - if (dynamicStyleProp.supportsMbFeatureState()) { - tmpFeatureState[targetMbName] = targetMbValue; // the same value will be potentially overridden multiple times, if the name remains identical - } else { - if (feature.properties) { - feature.properties[targetMbName] = targetMbValue; - } - } + let shouldResetAllData = false; + for (let j = 0; j < dynamicStyleProps.length; j++) { + const dynamicStyleProp = dynamicStyleProps[j]; + const usedFeatureState = dynamicStyleProp.enrichGeoJsonAndMbFeatureState( + featureCollection, + mbMap, + mbSourceId + ); + if (!usedFeatureState) { + shouldResetAllData = true; } - tmpFeatureIdentifier.source = mbSourceId; - tmpFeatureIdentifier.id = feature.id; - mbMap.setFeatureState(tmpFeatureIdentifier, tmpFeatureState); } // returns boolean indicating if styles do not support feature-state and some values are stored in geojson properties // this return-value is used in an optimization for style-updates with mapbox-gl. 
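The VectorStyle change above moves per-feature work into each dynamic style property and keeps only the roll-up: if any property cannot use mapbox feature-state and had to fall back to writing plain GeoJSON properties, the whole source needs a data reset. A minimal sketch of that roll-up, assuming only the `enrichGeoJsonAndMbFeatureState` signature shown above (the `EnrichableStyleProperty` interface and `needsFullDataReset` helper here are illustrative, not part of the change):

```ts
import type { FeatureCollection } from 'geojson';
import type { Map as MbMap } from 'mapbox-gl';

// Illustrative subset of IDynamicStyleProperty: only the method this roll-up needs.
interface EnrichableStyleProperty {
  enrichGeoJsonAndMbFeatureState(
    featureCollection: FeatureCollection,
    mbMap: MbMap,
    mbSourceId: string
  ): boolean; // true when the property wrote its values via mbMap.setFeatureState
}

// Any property that could not use feature-state forces a full data reset on the source.
function needsFullDataReset(
  props: EnrichableStyleProperty[],
  featureCollection: FeatureCollection,
  mbMap: MbMap,
  mbSourceId: string
): boolean {
  let shouldResetAllData = false;
  for (const prop of props) {
    const usedFeatureState = prop.enrichGeoJsonAndMbFeatureState(
      featureCollection,
      mbMap,
      mbSourceId
    );
    if (!usedFeatureState) {
      shouldResetAllData = true;
    }
  }
  return shouldResetAllData;
}
```

Semantically this matches the previous `dynamicStyleProps.some((prop) => !prop.supportsMbFeatureState())` check, except that each property now also performs its feature-state or GeoJSON-property writes in the same pass.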
// `true` indicates the entire data needs to reset on the source (otherwise the style-rules will not be reapplied) // `false` indicates the data does not need to be reset on the store, because styles are re-evaluated if they use featureState - return dynamicStyleProps.some((dynamicStyleProp) => !dynamicStyleProp.supportsMbFeatureState()); + return shouldResetAllData; } arePointsSymbolizedAsCircles() { diff --git a/x-pack/plugins/maps/public/index_pattern_util.test.ts b/x-pack/plugins/maps/public/index_pattern_util.test.ts index ffcc6da52677a..010c847f96eba 100644 --- a/x-pack/plugins/maps/public/index_pattern_util.test.ts +++ b/x-pack/plugins/maps/public/index_pattern_util.test.ts @@ -5,6 +5,7 @@ */ jest.mock('./kibana_services', () => ({})); +jest.mock('./licensed_features', () => ({})); import { getSourceFields, @@ -69,7 +70,7 @@ describe('Gold+ licensing', () => { describe('basic license', () => { beforeEach(() => { // eslint-disable-next-line @typescript-eslint/no-var-requires - require('./kibana_services').getIsGoldPlus = () => false; + require('./licensed_features').getIsGoldPlus = () => false; }); describe('getAggregatableGeoFieldTypes', () => { @@ -92,7 +93,7 @@ describe('Gold+ licensing', () => { describe('gold license', () => { beforeEach(() => { // eslint-disable-next-line @typescript-eslint/no-var-requires - require('./kibana_services').getIsGoldPlus = () => true; + require('./licensed_features').getIsGoldPlus = () => true; }); describe('getAggregatableGeoFieldTypes', () => { test('Should add geo_shape field', () => { diff --git a/x-pack/plugins/maps/public/index_pattern_util.ts b/x-pack/plugins/maps/public/index_pattern_util.ts index bd2a14619ac41..7af1571a0bc5b 100644 --- a/x-pack/plugins/maps/public/index_pattern_util.ts +++ b/x-pack/plugins/maps/public/index_pattern_util.ts @@ -6,9 +6,10 @@ import { IFieldType, IndexPattern } from 'src/plugins/data/public'; import { i18n } from '@kbn/i18n'; -import { getIndexPatternService, getIsGoldPlus } from './kibana_services'; +import { getIndexPatternService } from './kibana_services'; import { indexPatterns } from '../../../../src/plugins/data/public'; import { ES_GEO_FIELD_TYPE, ES_GEO_FIELD_TYPES } from '../common/constants'; +import { getIsGoldPlus } from './licensed_features'; export function getGeoTileAggNotSupportedReason(field: IFieldType): string | null { if (!field.aggregatable) { diff --git a/x-pack/plugins/maps/public/kibana_services.ts b/x-pack/plugins/maps/public/kibana_services.ts index c1dfb61e9f3b6..b520e0cb2df01 100644 --- a/x-pack/plugins/maps/public/kibana_services.ts +++ b/x-pack/plugins/maps/public/kibana_services.ts @@ -5,17 +5,10 @@ */ import _ from 'lodash'; +import { CoreStart } from 'kibana/public'; import { MapsLegacyConfig } from '../../../../src/plugins/maps_legacy/config'; import { MapsConfigType } from '../config'; import { MapsPluginStartDependencies } from './plugin'; -import { CoreStart } from '../../../../src/core/public'; - -let licenseId: string | undefined; -export const setLicenseId = (latestLicenseId: string | undefined) => (licenseId = latestLicenseId); -export const getLicenseId = () => licenseId; -let isGoldPlus: boolean = false; -export const setIsGoldPlus = (igp: boolean) => (isGoldPlus = igp); -export const getIsGoldPlus = () => isGoldPlus; let kibanaVersion: string; export const setKibanaVersion = (version: string) => (kibanaVersion = version); diff --git a/x-pack/plugins/maps/public/licensed_features.ts b/x-pack/plugins/maps/public/licensed_features.ts new file mode 100644 index 
0000000000000..67fa526da0cbd --- /dev/null +++ b/x-pack/plugins/maps/public/licensed_features.ts @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { ILicense, LicenseType } from '../../licensing/common/types'; +import { LicensingPluginSetup, LicensingPluginStart } from '../../licensing/public'; +import { APP_ID } from '../common/constants'; + +export enum LICENSED_FEATURES { + GEO_SHAPE_AGGS_GEO_TILE = 'GEO_SHAPE_AGGS_GEO_TILE', +} + +export interface LicensedFeatureDetail { + name: string; + license: LicenseType; +} + +export const LICENCED_FEATURES_DETAILS: Record = { + [LICENSED_FEATURES.GEO_SHAPE_AGGS_GEO_TILE]: { + name: 'geo_tile aggregation on geo_shape field-type', + license: 'gold', + }, +}; + +let licenseId: string | undefined; +let isGoldPlus: boolean = false; + +export const getLicenseId = () => licenseId; +export const getIsGoldPlus = () => isGoldPlus; + +export function registerLicensedFeatures(licensingPlugin: LicensingPluginSetup) { + for (const licensedFeature of Object.values(LICENSED_FEATURES)) { + licensingPlugin.featureUsage.register( + LICENCED_FEATURES_DETAILS[licensedFeature].name, + LICENCED_FEATURES_DETAILS[licensedFeature].license + ); + } +} + +let licensingPluginStart: LicensingPluginStart; +export function setLicensingPluginStart(licensingPlugin: LicensingPluginStart) { + licensingPluginStart = licensingPlugin; + licensingPluginStart.license$.subscribe((license: ILicense) => { + const gold = license.check(APP_ID, 'gold'); + isGoldPlus = gold.state === 'valid'; + licenseId = license.uid; + }); +} + +export function notifyLicensedFeatureUsage(licensedFeature: LICENSED_FEATURES) { + if (!licensingPluginStart) { + // eslint-disable-next-line no-console + console.error('May not call notifyLicensedFeatureUsage before plugin start'); + return; + } + licensingPluginStart.featureUsage.notifyUsage( + LICENCED_FEATURES_DETAILS[LICENSED_FEATURES[licensedFeature]].name + ); +} diff --git a/x-pack/plugins/maps/public/meta.test.js b/x-pack/plugins/maps/public/meta.test.js index 3486bf003aee0..c414c8a2d400e 100644 --- a/x-pack/plugins/maps/public/meta.test.js +++ b/x-pack/plugins/maps/public/meta.test.js @@ -12,14 +12,14 @@ jest.mock('@elastic/ems-client'); describe('default use without proxy', () => { beforeEach(() => { require('./kibana_services').getProxyElasticMapsServiceInMaps = () => false; - require('./kibana_services').getLicenseId = () => { - return 'foobarlicenseid'; - }; require('./kibana_services').getIsEmsEnabled = () => true; require('./kibana_services').getEmsTileLayerId = () => '123'; require('./kibana_services').getEmsFileApiUrl = () => 'https://file-api'; require('./kibana_services').getEmsTileApiUrl = () => 'https://tile-api'; require('./kibana_services').getEmsLandingPageUrl = () => 'http://test.com'; + require('./licensed_features').getLicenseId = () => { + return 'foobarlicenseid'; + }; }); test('should construct EMSClient with absolute file and tile API urls', async () => { diff --git a/x-pack/plugins/maps/public/meta.ts b/x-pack/plugins/maps/public/meta.ts index 5142793bede34..4eca6c3e671b7 100644 --- a/x-pack/plugins/maps/public/meta.ts +++ b/x-pack/plugins/maps/public/meta.ts @@ -18,7 +18,6 @@ import { } from '../common/constants'; import { getHttp, - getLicenseId, getIsEmsEnabled, getRegionmapLayers, getTilemap, @@ -29,6 +28,7 @@ 
import { getProxyElasticMapsServiceInMaps, getKibanaVersion, } from './kibana_services'; +import { getLicenseId } from './licensed_features'; export function getKibanaRegionList(): unknown[] { return getRegionmapLayers(); diff --git a/x-pack/plugins/maps/public/plugin.ts b/x-pack/plugins/maps/public/plugin.ts index 696964f0258d4..5b79863d0dd97 100644 --- a/x-pack/plugins/maps/public/plugin.ts +++ b/x-pack/plugins/maps/public/plugin.ts @@ -18,10 +18,8 @@ import { // @ts-ignore import { MapView } from './inspector/views/map_view'; import { - setIsGoldPlus, setKibanaCommonConfig, setKibanaVersion, - setLicenseId, setMapAppConfig, setStartServices, } from './kibana_services'; @@ -42,7 +40,6 @@ import { MapEmbeddableFactory } from './embeddable/map_embeddable_factory'; import { EmbeddableSetup } from '../../../../src/plugins/embeddable/public'; import { MapsXPackConfig, MapsConfigType } from '../config'; import { getAppTitle } from '../common/i18n_getters'; -import { ILicense } from '../../licensing/common/types'; import { lazyLoadMapModules } from './lazy_load_bundle'; import { MapsStartApi } from './api'; import { createSecurityLayerDescriptors, registerLayerWizard, registerSource } from './api'; @@ -50,8 +47,9 @@ import { SharePluginSetup, SharePluginStart } from '../../../../src/plugins/shar import { EmbeddableStart } from '../../../../src/plugins/embeddable/public'; import { MapsLegacyConfig } from '../../../../src/plugins/maps_legacy/config'; import { DataPublicPluginStart } from '../../../../src/plugins/data/public'; -import { LicensingPluginStart } from '../../licensing/public'; +import { LicensingPluginSetup, LicensingPluginStart } from '../../licensing/public'; import { StartContract as FileUploadStartContract } from '../../file_upload/public'; +import { registerLicensedFeatures, setLicensingPluginStart } from './licensed_features'; export interface MapsPluginSetupDependencies { inspector: InspectorSetupContract; @@ -60,6 +58,7 @@ export interface MapsPluginSetupDependencies { embeddable: EmbeddableSetup; mapsLegacy: { config: MapsLegacyConfig }; share: SharePluginSetup; + licensing: LicensingPluginSetup; } export interface MapsPluginStartDependencies { @@ -97,6 +96,8 @@ export class MapsPlugin } public setup(core: CoreSetup, plugins: MapsPluginSetupDependencies) { + registerLicensedFeatures(plugins.licensing); + const config = this._initializerContext.config.get(); setKibanaCommonConfig(plugins.mapsLegacy.config); setMapAppConfig(config); @@ -138,13 +139,7 @@ export class MapsPlugin } public start(core: CoreStart, plugins: MapsPluginStartDependencies): MapsStartApi { - if (plugins.licensing) { - plugins.licensing.license$.subscribe((license: ILicense) => { - const gold = license.check(APP_ID, 'gold'); - setIsGoldPlus(gold.state === 'valid'); - setLicenseId(license.uid); - }); - } + setLicensingPluginStart(plugins.licensing); plugins.uiActions.addTriggerAction(VISUALIZE_GEO_FIELD_TRIGGER, visualizeGeoFieldAction); setStartServices(core, plugins); diff --git a/x-pack/plugins/maps/public/routing/routes/maps_app/top_nav_config.tsx b/x-pack/plugins/maps/public/routing/routes/maps_app/top_nav_config.tsx index 8a0eb8db4d7aa..917abebfb6b25 100644 --- a/x-pack/plugins/maps/public/routing/routes/maps_app/top_nav_config.tsx +++ b/x-pack/plugins/maps/public/routing/routes/maps_app/top_nav_config.tsx @@ -123,31 +123,56 @@ export function getTopNavConfig({ return { id: savedObjectId }; } - if (hasSaveAndReturnConfig) { - topNavConfigs.push({ - id: 'saveAndReturn', - label: 
i18n.translate('xpack.maps.topNav.saveAndReturnButtonLabel', { - defaultMessage: 'Save and return', + topNavConfigs.push( + { + id: 'mapSettings', + label: i18n.translate('xpack.maps.topNav.openSettingsButtonLabel', { + defaultMessage: `Map settings`, }), - emphasize: true, - iconType: 'check', - run: () => { - onSave({ - newTitle: savedMap.title ? savedMap.title : '', - newDescription: savedMap.description ? savedMap.description : '', - newCopyOnSave: false, - isTitleDuplicateConfirmed: false, - returnToOrigin: true, - onTitleDuplicate: () => {}, - }); + description: i18n.translate('xpack.maps.topNav.openSettingsDescription', { + defaultMessage: `Open map settings`, + }), + testId: 'openSettingsButton', + disableButton() { + return isOpenSettingsDisabled; }, - testId: 'mapSaveAndReturnButton', - }); - } + run() { + openMapSettings(); + }, + }, + { + id: 'inspect', + label: i18n.translate('xpack.maps.topNav.openInspectorButtonLabel', { + defaultMessage: `inspect`, + }), + description: i18n.translate('xpack.maps.topNav.openInspectorDescription', { + defaultMessage: `Open Inspector`, + }), + testId: 'openInspectorButton', + run() { + getInspector().open(inspectorAdapters, {}); + }, + }, + { + id: 'full-screen', + label: i18n.translate('xpack.maps.topNav.fullScreenButtonLabel', { + defaultMessage: `full screen`, + }), + description: i18n.translate('xpack.maps.topNav.fullScreenDescription', { + defaultMessage: `full screen`, + }), + testId: 'mapsFullScreenMode', + run() { + getCoreChrome().setIsVisible(false); + enableFullScreen(); + }, + } + ); if (hasWritePermissions) { topNavConfigs.push({ id: 'save', + iconType: hasSaveAndReturnConfig ? undefined : 'save', label: hasSaveAndReturnConfig ? i18n.translate('xpack.maps.topNav.saveAsButtonLabel', { defaultMessage: 'Save as', @@ -192,51 +217,27 @@ export function getTopNavConfig({ }); } - topNavConfigs.push( - { - id: 'mapSettings', - label: i18n.translate('xpack.maps.topNav.openSettingsButtonLabel', { - defaultMessage: `Map settings`, - }), - description: i18n.translate('xpack.maps.topNav.openSettingsDescription', { - defaultMessage: `Open map settings`, - }), - testId: 'openSettingsButton', - disableButton() { - return isOpenSettingsDisabled; - }, - run() { - openMapSettings(); - }, - }, - { - id: 'inspect', - label: i18n.translate('xpack.maps.topNav.openInspectorButtonLabel', { - defaultMessage: `inspect`, - }), - description: i18n.translate('xpack.maps.topNav.openInspectorDescription', { - defaultMessage: `Open Inspector`, - }), - testId: 'openInspectorButton', - run() { - getInspector().open(inspectorAdapters, {}); - }, - }, - { - id: 'full-screen', - label: i18n.translate('xpack.maps.topNav.fullScreenButtonLabel', { - defaultMessage: `full screen`, - }), - description: i18n.translate('xpack.maps.topNav.fullScreenDescription', { - defaultMessage: `full screen`, + if (hasSaveAndReturnConfig) { + topNavConfigs.push({ + id: 'saveAndReturn', + label: i18n.translate('xpack.maps.topNav.saveAndReturnButtonLabel', { + defaultMessage: 'Save and return', }), - testId: 'mapsFullScreenMode', - run() { - getCoreChrome().setIsVisible(false); - enableFullScreen(); + emphasize: true, + iconType: 'checkInCircleFilled', + run: () => { + onSave({ + newTitle: savedMap.title ? savedMap.title : '', + newDescription: savedMap.description ? 
savedMap.description : '', + newCopyOnSave: false, + isTitleDuplicateConfirmed: false, + returnToOrigin: true, + onTitleDuplicate: () => {}, + }); }, - } - ); + testId: 'mapSaveAndReturnButton', + }); + } return topNavConfigs; } diff --git a/x-pack/plugins/maps/public/selectors/map_selectors.ts b/x-pack/plugins/maps/public/selectors/map_selectors.ts index db4371e9cd590..4b5122050eb71 100644 --- a/x-pack/plugins/maps/public/selectors/map_selectors.ts +++ b/x-pack/plugins/maps/public/selectors/map_selectors.ts @@ -52,9 +52,9 @@ import { ITMSSource } from '../classes/sources/tms_source'; import { IVectorSource } from '../classes/sources/vector_source'; import { ILayer } from '../classes/layers/layer'; -function createLayerInstance( +export function createLayerInstance( layerDescriptor: LayerDescriptor, - inspectorAdapters: Adapters + inspectorAdapters?: Adapters ): ILayer { const source: ISource = createSourceInstance(layerDescriptor.sourceDescriptor, inspectorAdapters); @@ -94,7 +94,7 @@ function createLayerInstance( } } -function createSourceInstance(sourceDescriptor: any, inspectorAdapters: Adapters): ISource { +function createSourceInstance(sourceDescriptor: any, inspectorAdapters?: Adapters): ISource { const source = getSourceByType(sourceDescriptor.type); if (!source) { throw new Error(`Unrecognized sourceType ${sourceDescriptor.type}`); diff --git a/x-pack/plugins/security_solution/common/constants.ts b/x-pack/plugins/security_solution/common/constants.ts index 2910f02a187f4..e46bd9e28d8c4 100644 --- a/x-pack/plugins/security_solution/common/constants.ts +++ b/x-pack/plugins/security_solution/common/constants.ts @@ -117,6 +117,7 @@ export const DETECTION_ENGINE_PREPACKAGED_URL = `${DETECTION_ENGINE_RULES_URL}/p export const DETECTION_ENGINE_PRIVILEGES_URL = `${DETECTION_ENGINE_URL}/privileges`; export const DETECTION_ENGINE_INDEX_URL = `${DETECTION_ENGINE_URL}/index`; export const DETECTION_ENGINE_TAGS_URL = `${DETECTION_ENGINE_URL}/tags`; +export const DETECTION_ENGINE_EQL_VALIDATION_URL = `${DETECTION_ENGINE_URL}/validate_eql`; export const DETECTION_ENGINE_RULES_STATUS_URL = `${DETECTION_ENGINE_RULES_URL}/_find_statuses`; export const DETECTION_ENGINE_PREPACKAGED_RULES_STATUS_URL = `${DETECTION_ENGINE_RULES_URL}/prepackaged/_status`; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.mock.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.mock.ts new file mode 100644 index 0000000000000..96afc0c85df44 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.mock.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
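Stepping back to the maps changes above: every layer and source now exposes a `getLicensedFeatures()` hook (defaulting to an empty array), and `addLayer` reports whatever a newly added layer returns through `notifyLicensedFeatureUsage`. As a sketch of how a source would opt in, assuming the `AbstractSource` base class and `LICENSED_FEATURES` enum introduced in this change (the subclass and its file location are hypothetical):

```ts
import { AbstractSource } from './source';
import { LICENSED_FEATURES } from '../../licensed_features';

// Hypothetical source that depends on a gold-licensed capability.
export class MyGeoShapeAggSource extends AbstractSource {
  async getLicensedFeatures(): Promise<LICENSED_FEATURES[]> {
    // Anything returned here is forwarded to licensing featureUsage.notifyUsage()
    // by addLayer() once the layer has been added to the map.
    return [LICENSED_FEATURES.GEO_SHAPE_AGGS_GEO_TILE];
  }
}
```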
+ */ + +import { EqlValidationSchema } from './eql_validation_schema'; + +export const getEqlValidationSchemaMock = (): EqlValidationSchema => ({ + index: ['index-123'], + query: 'process where process.name == "regsvr32.exe"', +}); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.test.ts new file mode 100644 index 0000000000000..84bb8e067bf75 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.test.ts @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { pipe } from 'fp-ts/lib/pipeable'; +import { left } from 'fp-ts/lib/Either'; + +import { exactCheck } from '../../../exact_check'; +import { foldLeftRight, getPaths } from '../../../test_utils'; +import { eqlValidationSchema, EqlValidationSchema } from './eql_validation_schema'; +import { getEqlValidationSchemaMock } from './eql_validation_schema.mock'; + +describe('EQL validation schema', () => { + it('requires a value for index', () => { + const payload = { + ...getEqlValidationSchemaMock(), + index: undefined, + }; + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "undefined" supplied to "index"', + ]); + expect(message.schema).toEqual({}); + }); + + it('requires a value for query', () => { + const payload = { + ...getEqlValidationSchemaMock(), + query: undefined, + }; + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "undefined" supplied to "query"', + ]); + expect(message.schema).toEqual({}); + }); + + it('validates a payload with index and query', () => { + const payload = getEqlValidationSchemaMock(); + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + const expected: EqlValidationSchema = { + index: ['index-123'], + query: 'process where process.name == "regsvr32.exe"', + }; + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(expected); + }); +}); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.ts new file mode 100644 index 0000000000000..abbbe33a32258 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/request/eql_validation_schema.ts @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import * as t from 'io-ts'; + +import { index, query } from '../common/schemas'; + +export const eqlValidationSchema = t.exact( + t.type({ + index, + query, + }) +); + +export type EqlValidationSchema = t.TypeOf; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.mock.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.mock.ts new file mode 100644 index 0000000000000..98e5db47253fb --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.mock.ts @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { EqlValidationSchema } from './eql_validation_schema'; + +export const getEqlValidationResponseMock = (): EqlValidationSchema => ({ + valid: false, + errors: ['line 3:52: token recognition error at: '], +}); + +export const getValidEqlValidationResponseMock = (): EqlValidationSchema => ({ + valid: true, + errors: [], +}); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.test.ts new file mode 100644 index 0000000000000..939238e340cff --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.test.ts @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { pipe } from 'fp-ts/lib/pipeable'; +import { left } from 'fp-ts/lib/Either'; + +import { exactCheck } from '../../../exact_check'; +import { foldLeftRight, getPaths } from '../../../test_utils'; +import { getEqlValidationResponseMock } from './eql_validation_schema.mock'; +import { eqlValidationSchema } from './eql_validation_schema'; + +describe('EQL validation response schema', () => { + it('validates a typical response', () => { + const payload = getEqlValidationResponseMock(); + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(getEqlValidationResponseMock()); + }); + + it('invalidates a response with extra properties', () => { + const payload = { ...getEqlValidationResponseMock(), extra: 'nope' }; + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual(['invalid keys "extra"']); + expect(message.schema).toEqual({}); + }); + + it('invalidates a response with missing properties', () => { + const payload = { ...getEqlValidationResponseMock(), valid: undefined }; + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "undefined" supplied to "valid"', + ]); + expect(message.schema).toEqual({}); + }); + + it('invalidates a response with properties of the wrong type', () => { + const payload = { ...getEqlValidationResponseMock(), errors: 'should be an array' }; + const decoded = eqlValidationSchema.decode(payload); + const checked = exactCheck(payload, decoded); + const message = pipe(checked, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "should be an array" supplied to "errors"', + ]); + expect(message.schema).toEqual({}); + }); +}); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.ts new file mode 100644 index 0000000000000..e999e1dd273f8 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/response/eql_validation_schema.ts @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as t from 'io-ts'; + +export const eqlValidationSchema = t.exact( + t.type({ + valid: t.boolean, + errors: t.array(t.string), + }) +); + +export type EqlValidationSchema = t.TypeOf; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/default_array.test.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/default_array.test.ts new file mode 100644 index 0000000000000..6e23f31e8a994 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/default_array.test.ts @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
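The request and response codecs above give both sides of the new `validate_eql` route a shared contract: `{ index, query }` in, `{ valid, errors }` out. A minimal decoding sketch, assuming the module paths shown above (aliased here because both files export `eqlValidationSchema`; import paths are abbreviated relative to `common/detection_engine`):

```ts
import { isRight } from 'fp-ts/lib/Either';

import { eqlValidationSchema as requestSchema } from './schemas/request/eql_validation_schema';
import { eqlValidationSchema as responseSchema } from './schemas/response/eql_validation_schema';

const request = {
  index: ['index-123'],
  query: 'process where process.name == "regsvr32.exe"',
};

// Decoding succeeds only when both fields are present and correctly typed.
if (isRight(requestSchema.decode(request))) {
  // ...hand the payload to the validate_eql route...
}

// The route's reply is expected to match the response codec exactly.
const reply = { valid: false, errors: ['line 3:52: token recognition error at: '] };
console.log(isRight(responseSchema.decode(reply))); // true
```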
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as t from 'io-ts'; + +import { DefaultArray } from './default_array'; +import { pipe } from 'fp-ts/lib/pipeable'; +import { left } from 'fp-ts/lib/Either'; +import { foldLeftRight, getPaths } from '../../../test_utils'; + +const testSchema = t.keyof({ + valid: true, + also_valid: true, +}); +type TestSchema = t.TypeOf; + +const defaultArraySchema = DefaultArray(testSchema); + +describe('default_array', () => { + test('it should validate an empty array', () => { + const payload: string[] = []; + const decoded = defaultArraySchema.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should validate an array of testSchema', () => { + const payload: TestSchema[] = ['valid']; + const decoded = defaultArraySchema.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should validate an array of valid testSchema strings', () => { + const payload = ['valid', 'also_valid']; + const decoded = defaultArraySchema.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual(payload); + }); + + test('it should not validate an array with a number', () => { + const payload = ['valid', 123]; + const decoded = defaultArraySchema.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "123" supplied to "DefaultArray"', + ]); + expect(message.schema).toEqual({}); + }); + + test('it should not validate an array with an invalid string', () => { + const payload = ['valid', 'invalid']; + const decoded = defaultArraySchema.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([ + 'Invalid value "invalid" supplied to "DefaultArray"', + ]); + expect(message.schema).toEqual({}); + }); + + test('it should return a default array entry', () => { + const payload = null; + const decoded = defaultArraySchema.decode(payload); + const message = pipe(decoded, foldLeftRight); + + expect(getPaths(left(message.errors))).toEqual([]); + expect(message.schema).toEqual([]); + }); +}); diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/default_array.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/default_array.ts new file mode 100644 index 0000000000000..8388eb315b8f4 --- /dev/null +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/default_array.ts @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
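The tests above exercise the `DefaultArray` codec that the next file introduces: it wraps any io-ts codec so that `null` or `undefined` input decodes to `[]`, while real arrays are validated element by element. A small usage sketch; the `severity` element codec is hypothetical, but any io-ts codec works:

```ts
import * as t from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';

import { DefaultArray } from './default_array';

// Hypothetical element codec.
const severity = t.keyof({ low: null, medium: null, high: null });
const defaultSeverityArray = DefaultArray(severity);

console.log(isRight(defaultSeverityArray.decode(undefined))); // true -> decodes to []
console.log(isRight(defaultSeverityArray.decode(['low', 'high']))); // true
console.log(isRight(defaultSeverityArray.decode(['low', 'loud']))); // false -> invalid element
```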
+ */ + +import * as t from 'io-ts'; +import { Either } from 'fp-ts/lib/Either'; + +/** + * Types the DefaultArray as: + * - If undefined, then a default array will be set + * - If an array is sent in, then the array will be validated to ensure all elements are type C + */ +export const DefaultArray = (codec: C) => { + const arrType = t.array(codec); + type ArrType = t.TypeOf; + return new t.Type( + 'DefaultArray', + arrType.is, + (input, context): Either => + input == null ? t.success([]) : arrType.validate(input, context), + t.identity + ); +}; diff --git a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts index 28a66d2948a92..e76dd3fca3740 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/schemas/types/index.ts @@ -5,6 +5,7 @@ */ export * from './default_actions_array'; +export * from './default_array'; export * from './default_boolean_false'; export * from './default_boolean_true'; export * from './default_empty_string'; diff --git a/x-pack/plugins/security_solution/common/detection_engine/utils.ts b/x-pack/plugins/security_solution/common/detection_engine/utils.ts index f76417099bb17..d7b23755699f5 100644 --- a/x-pack/plugins/security_solution/common/detection_engine/utils.ts +++ b/x-pack/plugins/security_solution/common/detection_engine/utils.ts @@ -19,5 +19,6 @@ export const hasNestedEntry = (entries: EntriesArray): boolean => { export const isEqlRule = (ruleType: Type | undefined): boolean => ruleType === 'eql'; export const isThresholdRule = (ruleType: Type | undefined): boolean => ruleType === 'threshold'; -export const isQueryRule = (ruleType: Type | undefined): boolean => ruleType === 'query'; +export const isQueryRule = (ruleType: Type | undefined): boolean => + ruleType === 'query' || ruleType === 'saved_query'; export const isThreatMatchRule = (ruleType: Type): boolean => ruleType === 'threat_match'; diff --git a/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.test.ts b/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.test.ts index ef1d9a99b0aeb..352c628f9fa23 100644 --- a/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.test.ts +++ b/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.test.ts @@ -76,7 +76,7 @@ describe('When invoking Trusted Apps Schema', () => { os: 'windows', entries: [ { - field: 'process.executable.text', + field: 'process.executable.caseless', type: 'match', operator: 'included', value: 'c:/programs files/Anti-Virus', @@ -204,7 +204,7 @@ describe('When invoking Trusted Apps Schema', () => { field: 'process.hash.*', value: 'A4370C0CF81686C0B696FA6261c9d3e0d810ae704ab8301839dffd5d5112f476', }, - { field: 'process.executable.text', value: '/tmp/dir1' }, + { field: 'process.executable.caseless', value: '/tmp/dir1' }, ].forEach((partialEntry) => { const bodyMsg3 = { ...getCreateTrustedAppItem(), diff --git a/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.ts b/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.ts index 25456115b3713..b4e837c472915 100644 --- a/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.ts +++ b/x-pack/plugins/security_solution/common/endpoint/schema/trusted_apps.ts @@ -35,7 +35,7 @@ export const PostTrustedAppCreateRequestSchema = { schema.object({ field: schema.oneOf([ 
schema.literal('process.hash.*'), - schema.literal('process.executable.text'), + schema.literal('process.executable.caseless'), ]), type: schema.literal('match'), operator: schema.literal('included'), diff --git a/x-pack/plugins/security_solution/common/endpoint/types/trusted_apps.ts b/x-pack/plugins/security_solution/common/endpoint/types/trusted_apps.ts index 75e0347b10078..3568136dd0e7b 100644 --- a/x-pack/plugins/security_solution/common/endpoint/types/trusted_apps.ts +++ b/x-pack/plugins/security_solution/common/endpoint/types/trusted_apps.ts @@ -33,7 +33,7 @@ export interface PostTrustedAppCreateResponse { } export interface MacosLinuxConditionEntry { - field: 'process.hash.*' | 'process.executable.text'; + field: 'process.hash.*' | 'process.executable.caseless'; type: 'match'; operator: 'included'; value: string; diff --git a/x-pack/plugins/security_solution/common/shared_exports.ts b/x-pack/plugins/security_solution/common/shared_exports.ts index bd1086a3f21e9..6269c3cee999c 100644 --- a/x-pack/plugins/security_solution/common/shared_exports.ts +++ b/x-pack/plugins/security_solution/common/shared_exports.ts @@ -5,6 +5,7 @@ */ export { NonEmptyString } from './detection_engine/schemas/types/non_empty_string'; +export { DefaultArray } from './detection_engine/schemas/types/default_array'; export { DefaultUuid } from './detection_engine/schemas/types/default_uuid'; export { DefaultStringArray } from './detection_engine/schemas/types/default_string_array'; export { diff --git a/x-pack/plugins/security_solution/common/shared_imports.ts b/x-pack/plugins/security_solution/common/shared_imports.ts index 564254b6a7596..bfe77d2f9e626 100644 --- a/x-pack/plugins/security_solution/common/shared_imports.ts +++ b/x-pack/plugins/security_solution/common/shared_imports.ts @@ -42,4 +42,6 @@ export { ExceptionListType, Type, ENDPOINT_LIST_ID, + osTypeArray, + OsTypeArray, } from '../../lists/common'; diff --git a/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts b/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts index f999c5cecc392..d8832dc4ee600 100644 --- a/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts +++ b/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts @@ -93,7 +93,7 @@ import { goToScheduleStepTab, waitForTheRuleToBeExecuted, } from '../tasks/create_new_rule'; -import { saveEditedRule } from '../tasks/edit_rule'; +import { saveEditedRule, waitForKibana } from '../tasks/edit_rule'; import { esArchiverLoad, esArchiverUnload } from '../tasks/es_archiver'; import { loginAndWaitForPageWithoutDateRange } from '../tasks/login'; import { refreshPage } from '../tasks/security_header'; @@ -290,6 +290,7 @@ describe('Custom detection rules deletion and edition', () => { context('Edition', () => { it('Allows a rule to be edited', () => { editFirstRule(); + waitForKibana(); // expect define step to populate cy.get(CUSTOM_QUERY_INPUT).should('have.text', existingRule.customQuery); diff --git a/x-pack/plugins/security_solution/cypress/screens/edit_rule.ts b/x-pack/plugins/security_solution/cypress/screens/edit_rule.ts index 1bf0ff34ebd94..e25eb7453c63c 100644 --- a/x-pack/plugins/security_solution/cypress/screens/edit_rule.ts +++ b/x-pack/plugins/security_solution/cypress/screens/edit_rule.ts @@ -5,3 +5,5 @@ */ export const EDIT_SUBMIT_BUTTON = '[data-test-subj="ruleEditSubmitButton"]'; +export const KIBANA_LOADING_INDICATOR = 
'[data-test-subj="globalLoadingIndicator"]'; +export const KIBANA_LOADING_COMPLETE_INDICATOR = '[data-test-subj="globalLoadingIndicator-hidden"]'; diff --git a/x-pack/plugins/security_solution/cypress/tasks/edit_rule.ts b/x-pack/plugins/security_solution/cypress/tasks/edit_rule.ts index 690a36058ec33..2dc1318ccb81d 100644 --- a/x-pack/plugins/security_solution/cypress/tasks/edit_rule.ts +++ b/x-pack/plugins/security_solution/cypress/tasks/edit_rule.ts @@ -4,9 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { EDIT_SUBMIT_BUTTON } from '../screens/edit_rule'; +import { EDIT_SUBMIT_BUTTON, KIBANA_LOADING_COMPLETE_INDICATOR } from '../screens/edit_rule'; export const saveEditedRule = () => { cy.get(EDIT_SUBMIT_BUTTON).should('exist').click({ force: true }); cy.get(EDIT_SUBMIT_BUTTON).should('not.exist'); }; + +export const waitForKibana = () => { + cy.get(KIBANA_LOADING_COMPLETE_INDICATOR).should('exist'); +}; diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.test.tsx index ef2a5770eee8d..037462839c72d 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.test.tsx @@ -257,7 +257,7 @@ describe('When the add exception modal is opened', () => { indexPatterns: { ...stubIndexPattern, fields: [ - { name: 'file.path.text', type: 'string' }, + { name: 'file.path.caseless', type: 'string' }, { name: 'subject_name', type: 'string' }, { name: 'trusted', type: 'string' }, { name: 'file.hash.sha256', type: 'string' }, diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx index dee1db6482067..ad5bc98243467 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/add_exception_modal/index.tsx @@ -30,6 +30,7 @@ import * as i18nCommon from '../../../translations'; import * as i18n from './translations'; import * as sharedI18n from '../translations'; import { Ecs } from '../../../../../common/ecs'; +import { osTypeArray, OsTypeArray } from '../../../../../common/shared_imports'; import { useAppToasts } from '../../../hooks/use_app_toasts'; import { useKibana } from '../../../lib/kibana'; import { ExceptionBuilderComponent } from '../builder'; @@ -211,12 +212,7 @@ export const AddExceptionModal = memo(function AddExceptionModal({ const initialExceptionItems = useMemo((): ExceptionsBuilderExceptionItem[] => { if (exceptionListType === 'endpoint' && alertData != null && ruleExceptionList) { - return defaultEndpointExceptionItems( - exceptionListType, - ruleExceptionList.list_id, - ruleName, - alertData - ); + return defaultEndpointExceptionItems(ruleExceptionList.list_id, ruleName, alertData); } else { return []; } @@ -265,11 +261,11 @@ export const AddExceptionModal = memo(function AddExceptionModal({ [setShouldBulkCloseAlert] ); - const retrieveAlertOsTypes = useCallback((): string[] => { - const osDefaults = ['windows', 'macos']; + const retrieveAlertOsTypes = useCallback((): OsTypeArray => { + const osDefaults: OsTypeArray = ['windows', 'macos']; if 
(alertData != null) { const osTypes = alertData.host && alertData.host.os && alertData.host.os.family; - if (osTypes != null && osTypes.length > 0) { + if (osTypeArray.is(osTypes) && osTypes != null && osTypes.length > 0) { return osTypes; } return osDefaults; @@ -316,13 +312,14 @@ export const AddExceptionModal = memo(function AddExceptionModal({ [fetchOrCreateListError, exceptionItemsToAdd] ); + const addExceptionMessage = + exceptionListType === 'endpoint' ? i18n.ADD_ENDPOINT_EXCEPTION : i18n.ADD_EXCEPTION; + return ( - - {exceptionListType === 'endpoint' ? i18n.ADD_ENDPOINT_EXCEPTION : i18n.ADD_EXCEPTION} - + {addExceptionMessage} {ruleName} @@ -429,7 +426,7 @@ export const AddExceptionModal = memo(function AddExceptionModal({ isDisabled={isSubmitButtonDisabled} fill > - {i18n.ADD_EXCEPTION} + {addExceptionMessage} )} diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx index 9bfd04cc19d72..2ee0fe88f73f7 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/helpers.test.tsx @@ -90,9 +90,9 @@ const getMockNestedParentBuilderEntry = (): FormattedBuilderEntry => ({ const mockEndpointFields = [ { - name: 'file.path.text', + name: 'file.path.caseless', type: 'string', - esTypes: ['text'], + esTypes: ['keyword'], count: 0, scripted: false, searchable: true, @@ -303,8 +303,8 @@ describe('Exception builder helpers', () => { { aggregatable: false, count: 0, - esTypes: ['text'], - name: 'file.path.text', + esTypes: ['keyword'], + name: 'file.path.caseless', readFromDocValues: false, scripted: false, searchable: true, diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx index 165f3314c2f15..5904e0034a51c 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/builder/index.tsx @@ -234,13 +234,12 @@ export const ExceptionBuilderComponent = ({ // empty `entries` array. 
Thought about appending an entry item to one, but that // would then be arbitrary, decided to just create a new exception list item const newException = getNewExceptionItem({ - listType, listId, namespaceType: listNamespaceType, ruleName, }); setUpdateExceptions([...exceptions, { ...newException }]); - }, [setUpdateExceptions, exceptions, listType, listId, listNamespaceType, ruleName]); + }, [setUpdateExceptions, exceptions, listId, listNamespaceType, ruleName]); // The builder can have existing exception items, or new exception items that have yet // to be created (and thus lack an id), this was creating some React bugs with relying diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx index 128686428598c..08f7e3af90d0c 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/index.tsx @@ -40,7 +40,6 @@ import { AddExceptionComments } from '../add_exception_comments'; import { enrichExistingExceptionItemWithComments, enrichExceptionItemsWithOS, - getOperatingSystems, entryHasListType, entryHasNonEcsType, lowercaseHashValues, @@ -228,8 +227,7 @@ export const EditExceptionModal = memo(function EditExceptionModal({ }, ]; if (exceptionListType === 'endpoint') { - const osTypes = exceptionItem._tags ? getOperatingSystems(exceptionItem._tags) : []; - enriched = lowercaseHashValues(enrichExceptionItemsWithOS(enriched, osTypes)); + enriched = lowercaseHashValues(enrichExceptionItemsWithOS(enriched, exceptionItem.os_types)); } return enriched; }, [exceptionItemsToAdd, exceptionItem, comment, exceptionListType]); diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/exceptionable_fields.json b/x-pack/plugins/security_solution/public/common/components/exceptions/exceptionable_fields.json index 037e340ee7fa2..2ea200466445b 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/exceptionable_fields.json +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/exceptionable_fields.json @@ -6,33 +6,33 @@ "Target.process.Ext.code_signature.valid", "Target.process.Ext.services", "Target.process.Ext.user", - "Target.process.command_line.text", - "Target.process.executable.text", + "Target.process.command_line.caseless", + "Target.process.executable.caseless", "Target.process.hash.md5", "Target.process.hash.sha1", "Target.process.hash.sha256", "Target.process.hash.sha512", - "Target.process.name.text", + "Target.process.name.caseless", "Target.process.parent.Ext.code_signature.status", "Target.process.parent.Ext.code_signature.subject_name", "Target.process.parent.Ext.code_signature.trusted", "Target.process.parent.Ext.code_signature.valid", - "Target.process.parent.command_line.text", - "Target.process.parent.executable.text", + "Target.process.parent.command_line.caseless", + "Target.process.parent.executable.caseless", "Target.process.parent.hash.md5", "Target.process.parent.hash.sha1", "Target.process.parent.hash.sha256", "Target.process.parent.hash.sha512", - "Target.process.parent.name.text", + "Target.process.parent.name.caseless", "Target.process.parent.pgid", - "Target.process.parent.working_directory.text", + "Target.process.parent.working_directory.caseless", "Target.process.pe.company", 
"Target.process.pe.description", "Target.process.pe.file_version", "Target.process.pe.original_file_name", "Target.process.pe.product", "Target.process.pgid", - "Target.process.working_directory.text", + "Target.process.working_directory.caseless", "agent.id", "agent.type", "agent.version", @@ -66,14 +66,14 @@ "file.mode", "file.name", "file.owner", - "file.path.text", + "file.path.caseless", "file.pe.company", "file.pe.description", "file.pe.file_version", "file.pe.original_file_name", "file.pe.product", "file.size", - "file.target_path.text", + "file.target_path.caseless", "file.type", "file.uid", "group.Ext.real.id", @@ -84,9 +84,9 @@ "host.id", "host.os.Ext.variant", "host.os.family", - "host.os.full.text", + "host.os.full.caseless", "host.os.kernel", - "host.os.name.text", + "host.os.name.caseless", "host.os.platform", "host.os.version", "host.type", @@ -96,33 +96,33 @@ "process.Ext.code_signature.valid", "process.Ext.services", "process.Ext.user", - "process.command_line.text", - "process.executable.text", + "process.command_line.caseless", + "process.executable.caseless", "process.hash.md5", "process.hash.sha1", "process.hash.sha256", "process.hash.sha512", - "process.name.text", + "process.name.caseless", "process.parent.Ext.code_signature.status", "process.parent.Ext.code_signature.subject_name", "process.parent.Ext.code_signature.trusted", "process.parent.Ext.code_signature.valid", - "process.parent.command_line.text", - "process.parent.executable.text", + "process.parent.command_line.caseless", + "process.parent.executable.caseless", "process.parent.hash.md5", "process.parent.hash.sha1", "process.parent.hash.sha256", "process.parent.hash.sha512", - "process.parent.name.text", + "process.parent.name.caseless", "process.parent.pgid", - "process.parent.working_directory.text", + "process.parent.working_directory.caseless", "process.pe.company", "process.pe.description", "process.pe.file_version", "process.pe.original_file_name", "process.pe.product", "process.pgid", - "process.working_directory.text", + "process.working_directory.caseless", "rule.uuid", "user.domain", "user.email", diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx index 26fb460aee382..c89bde6d04dd3 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.test.tsx @@ -10,8 +10,6 @@ import moment from 'moment-timezone'; import { getOperatorType, getExceptionOperatorSelect, - getOperatingSystems, - getTagsInclude, getFormattedComments, filterExceptionItems, getNewExceptionItem, @@ -52,6 +50,7 @@ import { CreateExceptionListItemSchema, ExceptionListItemSchema, EntriesArray, + OsTypeArray, } from '../../../../../lists/common/schemas'; import { IIndexPattern } from 'src/plugins/data/common'; @@ -186,76 +185,18 @@ describe('Exception helpers', () => { }); }); - describe('#getOperatingSystems', () => { - test('it returns null if no operating system tag specified', () => { - const result = getOperatingSystems(['some tag', 'some other tag']); - - expect(result).toEqual([]); - }); - - test('it returns null if operating system tag malformed', () => { - const result = getOperatingSystems(['some tag', 'jibberos:mac,windows', 'some other tag']); - - expect(result).toEqual([]); - }); - - test('it returns operating systems if space included in os tag', () => { - const result 
= getOperatingSystems(['some tag', 'os: macos', 'some other tag']); - expect(result).toEqual(['macos']); - }); - - test('it returns operating systems if multiple os tags specified', () => { - const result = getOperatingSystems(['some tag', 'os: macos', 'some other tag', 'os:windows']); - expect(result).toEqual(['macos', 'windows']); - }); - }); - describe('#formatOperatingSystems', () => { test('it returns null if no operating system tag specified', () => { - const result = formatOperatingSystems(getOperatingSystems(['some tag', 'some other tag'])); - - expect(result).toEqual(''); - }); - - test('it returns null if operating system tag malformed', () => { - const result = formatOperatingSystems( - getOperatingSystems(['some tag', 'jibberos:mac,windows', 'some other tag']) - ); - + const result = formatOperatingSystems(['some os', 'some other os']); expect(result).toEqual(''); }); - test('it returns formatted operating systems if space included in os tag', () => { - const result = formatOperatingSystems( - getOperatingSystems(['some tag', 'os: macos', 'some other tag']) - ); - - expect(result).toEqual('macOS'); - }); - - test('it returns formatted operating systems if multiple os tags specified', () => { - const result = formatOperatingSystems( - getOperatingSystems(['some tag', 'os: macos', 'some other tag', 'os:windows']) - ); - + test('it returns formatted operating systems if multiple specified', () => { + const result = formatOperatingSystems(['some tag', 'macos', 'some other tag', 'windows']); expect(result).toEqual('macOS, Windows'); }); }); - describe('#getTagsInclude', () => { - test('it returns a tuple of "false" and "null" if no matches found', () => { - const result = getTagsInclude({ tags: ['some', 'tags', 'here'], regex: /(no match)/ }); - - expect(result).toEqual([false, null]); - }); - - test('it returns a tuple of "true" and matching string if matches found', () => { - const result = getTagsInclude({ tags: ['some', 'tags', 'here'], regex: /(some)/ }); - - expect(result).toEqual([true, 'some']); - }); - }); - describe('#getFormattedComments', () => { test('it returns formatted comment object with username and timestamp', () => { const payload = getCommentsArrayMock(); @@ -384,7 +325,6 @@ describe('Exception helpers', () => { test('it removes `temporaryId` from items', () => { const { meta, ...rest } = getNewExceptionItem({ - listType: 'detection', listId: '123', namespaceType: 'single', ruleName: 'rule name', @@ -400,7 +340,6 @@ describe('Exception helpers', () => { const payload = getExceptionListItemSchemaMock(); const result = formatExceptionItemForUpdate(payload); const expected = { - _tags: ['endpoint', 'process', 'malware', 'os:linux'], comments: [], description: 'some description', entries: ENTRIES, @@ -409,6 +348,7 @@ describe('Exception helpers', () => { meta: {}, name: 'some name', namespace_type: 'single', + os_types: ['linux'], tags: ['user added string for a tag', 'malware'], type: 'simple', }; @@ -489,14 +429,14 @@ describe('Exception helpers', () => { }); describe('#enrichExceptionItemsWithOS', () => { - test('it should add an os tag to an exception item', () => { + test('it should add an os to an exception item', () => { const payload = [getExceptionListItemSchemaMock()]; - const osTypes = ['windows']; + const osTypes: OsTypeArray = ['windows']; const result = enrichExceptionItemsWithOS(payload, osTypes); const expected = [ { ...getExceptionListItemSchemaMock(), - _tags: [...getExceptionListItemSchemaMock()._tags, 'os:windows'], + os_types: ['windows'], }, ]; 
expect(result).toEqual(expected); @@ -504,36 +444,16 @@ describe('Exception helpers', () => { test('it should add multiple os tags to all exception items', () => { const payload = [getExceptionListItemSchemaMock(), getExceptionListItemSchemaMock()]; - const osTypes = ['windows', 'macos']; - const result = enrichExceptionItemsWithOS(payload, osTypes); - const expected = [ - { - ...getExceptionListItemSchemaMock(), - _tags: [...getExceptionListItemSchemaMock()._tags, 'os:windows', 'os:macos'], - }, - { - ...getExceptionListItemSchemaMock(), - _tags: [...getExceptionListItemSchemaMock()._tags, 'os:windows', 'os:macos'], - }, - ]; - expect(result).toEqual(expected); - }); - - test('it should add os tag to all exception items without duplication', () => { - const payload = [ - { ...getExceptionListItemSchemaMock(), _tags: ['os:linux', 'os:windows'] }, - { ...getExceptionListItemSchemaMock(), _tags: ['os:linux'] }, - ]; - const osTypes = ['windows']; + const osTypes: OsTypeArray = ['windows', 'macos']; const result = enrichExceptionItemsWithOS(payload, osTypes); const expected = [ { ...getExceptionListItemSchemaMock(), - _tags: ['os:linux', 'os:windows'], + os_types: ['windows', 'macos'], }, { ...getExceptionListItemSchemaMock(), - _tags: ['os:linux', 'os:windows'], + os_types: ['windows', 'macos'], }, ]; expect(result).toEqual(expected); @@ -715,7 +635,6 @@ describe('Exception helpers', () => { describe('getPrepopulatedItem', () => { test('it returns prepopulated items', () => { const prepopulatedItem = getPrepopulatedItem({ - listType: 'endpoint', listId: 'some_id', ruleName: 'my rule', codeSignature: { subjectName: '', trusted: '' }, @@ -733,7 +652,7 @@ describe('Exception helpers', () => { field: 'file.Ext.code_signature', type: 'nested', }, - { field: 'file.path.text', operator: 'included', type: 'match', value: '' }, + { field: 'file.path.caseless', operator: 'included', type: 'match', value: '' }, { field: 'file.hash.sha256', operator: 'included', type: 'match', value: '' }, { field: 'event.code', operator: 'included', type: 'match', value: '' }, ]); @@ -741,7 +660,6 @@ describe('Exception helpers', () => { test('it returns prepopulated items with values', () => { const prepopulatedItem = getPrepopulatedItem({ - listType: 'endpoint', listId: 'some_id', ruleName: 'my rule', codeSignature: { subjectName: 'someSubjectName', trusted: 'false' }, @@ -764,7 +682,12 @@ describe('Exception helpers', () => { field: 'file.Ext.code_signature', type: 'nested', }, - { field: 'file.path.text', operator: 'included', type: 'match', value: 'some-file-path' }, + { + field: 'file.path.caseless', + operator: 'included', + type: 'match', + value: 'some-file-path', + }, { field: 'file.hash.sha256', operator: 'included', type: 'match', value: 'some-hash' }, { field: 'event.code', operator: 'included', type: 'match', value: 'some-event-code' }, ]); @@ -847,7 +770,7 @@ describe('Exception helpers', () => { describe('defaultEndpointExceptionItems', () => { test('it should return pre-populated items', () => { - const defaultItems = defaultEndpointExceptionItems('endpoint', 'list_id', 'my_rule', { + const defaultItems = defaultEndpointExceptionItems('list_id', 'my_rule', { _id: '123', file: { Ext: { @@ -881,7 +804,7 @@ describe('Exception helpers', () => { type: 'nested', }, { - field: 'file.path.text', + field: 'file.path.caseless', operator: 'included', type: 'match', value: 'some file path', @@ -904,7 +827,7 @@ describe('Exception helpers', () => { type: 'nested', }, { - field: 'file.path.text', + field: 
'file.path.caseless', operator: 'included', type: 'match', value: 'some file path', diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx index d4acfa39f995d..684f3390ae41a 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/helpers.tsx @@ -6,7 +6,7 @@ import React from 'react'; import { EuiText, EuiCommentProps, EuiAvatar } from '@elastic/eui'; -import { capitalize, union } from 'lodash'; +import { capitalize } from 'lodash'; import moment from 'moment'; import uuid from 'uuid'; @@ -33,8 +33,8 @@ import { createExceptionListItemSchema, exceptionListItemSchema, UpdateExceptionListItemSchema, - ExceptionListType, EntryNested, + OsTypeArray, } from '../../../shared_imports'; import { IIndexPattern } from '../../../../../../../src/plugins/data/common'; import { validate } from '../../../../common/validate'; @@ -98,20 +98,12 @@ export const getEntryValue = (item: BuilderEntry): string | string[] | undefined } }; -/** - * Retrieves the values of tags marked as os - * - * @param tags an ExceptionItem's tags - */ -export const getOperatingSystems = (tags: string[]): string[] => { - return tags.filter((tag) => tag.startsWith('os:')).map((os) => os.substring(3).trim()); -}; - /** * Formats os value array to a displayable string */ export const formatOperatingSystems = (osTypes: string[]): string => { return osTypes + .filter((os) => ['linux', 'macos', 'windows'].includes(os)) .map((os) => { if (os === 'macos') { return 'macOS'; @@ -121,21 +113,6 @@ export const formatOperatingSystems = (osTypes: string[]): string => { .join(', '); }; -/** - * Returns all tags that match a given regex - */ -export const getTagsInclude = ({ - tags, - regex, -}: { - tags: string[]; - regex: RegExp; -}): [boolean, string | null] => { - const matches: string[] | null = tags.join(';').match(regex); - const match = matches != null ? matches[1] : null; - return [matches != null, match]; -}; - /** * Formats ExceptionItem.comments into EuiCommentList format * @@ -158,18 +135,15 @@ export const getFormattedComments = (comments: CommentsArray): EuiCommentProps[] })); export const getNewExceptionItem = ({ - listType, listId, namespaceType, ruleName, }: { - listType: ExceptionListType; listId: string; namespaceType: NamespaceType; ruleName: string; }): CreateExceptionListItemBuilderSchema => { return { - _tags: [listType], comments: [], description: `${ruleName} - exception list item`, entries: [ @@ -326,14 +300,12 @@ export const enrichExistingExceptionItemWithComments = ( */ export const enrichExceptionItemsWithOS = ( exceptionItems: Array, - osTypes: string[] + osTypes: OsTypeArray ): Array => { - const osTags = osTypes.map((os) => `os:${os}`); return exceptionItems.map((item: ExceptionListItemSchema | CreateExceptionListItemSchema) => { - const newTags = item._tags ? 
union(item._tags, osTags) : [...osTags]; return { ...item, - _tags: newTags, + os_types: osTypes, }; }); }; @@ -419,7 +391,6 @@ export const getCodeSignatureValue = ( * Returns the default values from the alert data to autofill new endpoint exceptions */ export const getPrepopulatedItem = ({ - listType, listId, ruleName, codeSignature, @@ -428,7 +399,6 @@ export const getPrepopulatedItem = ({ eventCode, listNamespace = 'agnostic', }: { - listType: ExceptionListType; listId: string; listNamespace?: NamespaceType; ruleName: string; @@ -438,7 +408,7 @@ export const getPrepopulatedItem = ({ eventCode: string; }): ExceptionsBuilderExceptionItem => { return { - ...getNewExceptionItem({ listType, listId, namespaceType: listNamespace, ruleName }), + ...getNewExceptionItem({ listId, namespaceType: listNamespace, ruleName }), entries: [ { field: 'file.Ext.code_signature', @@ -459,7 +429,7 @@ export const getPrepopulatedItem = ({ ], }, { - field: 'file.path.text', + field: 'file.path.caseless', operator: 'included', type: 'match', value: filePath ?? '', @@ -514,7 +484,6 @@ export const entryHasNonEcsType = ( * Returns the default values from the alert data to autofill new endpoint exceptions */ export const defaultEndpointExceptionItems = ( - listType: ExceptionListType, listId: string, ruleName: string, alertEcsData: Ecs @@ -523,7 +492,6 @@ export const defaultEndpointExceptionItems = ( return getCodeSignatureValue(alertEcsData).map((codeSignature) => getPrepopulatedItem({ - listType, listId, ruleName, filePath: file && file.path ? file.path[0] : '', diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx index 944631d4e9fb5..38cf5722fa894 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/use_fetch_or_create_rule_exception_list.tsx @@ -82,7 +82,6 @@ export const useFetchOrCreateRuleExceptionList = ({ type: exceptionListType, namespace_type: 'single', list_id: undefined, - _tags: undefined, tags: undefined, meta: undefined, }; diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/exception_item/index.stories.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/exception_item/index.stories.tsx index 39f34ae8a3cf3..a1fa0884b6b0c 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/exception_item/index.stories.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/exception_item/index.stories.tsx @@ -43,7 +43,6 @@ storiesOf('Components/ExceptionItem', module) }) .add('with description', () => { const payload = getExceptionListItemSchemaMock(); - payload._tags = []; payload.comments = []; payload.entries = [ { @@ -66,7 +65,6 @@ storiesOf('Components/ExceptionItem', module) }) .add('with comments', () => { const payload = getExceptionListItemSchemaMock(); - payload._tags = []; payload.description = ''; payload.comments = getCommentsArrayMock(); payload.entries = [ @@ -90,7 +88,6 @@ storiesOf('Components/ExceptionItem', module) }) .add('with nested entries', () => { const payload = getExceptionListItemSchemaMock(); - payload._tags = []; payload.description = ''; payload.comments = []; diff --git 
a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.test.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.test.tsx index 5f6e54b0d3cff..dbd4c805aa950 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.test.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.test.tsx @@ -175,10 +175,13 @@ describe('Exception viewer helpers', () => { test('it returns formatted description list with a description if one specified', () => { const payload = getExceptionListItemSchemaMock(); - payload._tags = []; payload.description = 'Im a description'; const result = getDescriptionListContent(payload); const expected: DescriptionListItem[] = [ + { + description: 'Linux', + title: 'OS', + }, { description: 'April 20th 2020 @ 15:25:31', title: 'Date created', @@ -198,10 +201,13 @@ describe('Exception viewer helpers', () => { test('it returns just user and date created if no other fields specified', () => { const payload = getExceptionListItemSchemaMock(); - payload._tags = []; payload.description = ''; const result = getDescriptionListContent(payload); const expected: DescriptionListItem[] = [ + { + description: 'Linux', + title: 'OS', + }, { description: 'April 20th 2020 @ 15:25:31', title: 'Date created', diff --git a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.tsx b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.tsx index 86b0512410e6f..edc3d20b03e5a 100644 --- a/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.tsx +++ b/x-pack/plugins/security_solution/public/common/components/exceptions/viewer/helpers.tsx @@ -6,12 +6,7 @@ import moment from 'moment'; import { entriesNested, ExceptionListItemSchema } from '../../../../lists_plugin_deps'; -import { - getEntryValue, - getExceptionOperatorSelect, - formatOperatingSystems, - getOperatingSystems, -} from '../helpers'; +import { getEntryValue, getExceptionOperatorSelect, formatOperatingSystems } from '../helpers'; import { FormattedEntry, BuilderEntry, DescriptionListItem } from '../types'; import * as i18n from '../translations'; @@ -80,7 +75,7 @@ export const getDescriptionListContent = ( const details = [ { title: i18n.OPERATING_SYSTEM, - value: formatOperatingSystems(getOperatingSystems(exceptionItem._tags ?? [])), + value: formatOperatingSystems(exceptionItem.os_types), }, { title: i18n.DATE_CREATED, diff --git a/x-pack/plugins/security_solution/public/common/hooks/eql/api.ts b/x-pack/plugins/security_solution/public/common/hooks/eql/api.ts new file mode 100644 index 0000000000000..11fe79910bc87 --- /dev/null +++ b/x-pack/plugins/security_solution/public/common/hooks/eql/api.ts @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { HttpStart } from '../../../../../../../src/core/public'; +import { DETECTION_ENGINE_EQL_VALIDATION_URL } from '../../../../common/constants'; +import { EqlValidationSchema as EqlValidationRequest } from '../../../../common/detection_engine/schemas/request/eql_validation_schema'; +import { EqlValidationSchema as EqlValidationResponse } from '../../../../common/detection_engine/schemas/response/eql_validation_schema'; + +interface ApiParams { + http: HttpStart; + signal: AbortSignal; +} + +export const validateEql = async ({ + http, + query, + index, + signal, +}: ApiParams & EqlValidationRequest) => { + return http.fetch(DETECTION_ENGINE_EQL_VALIDATION_URL, { + method: 'POST', + body: JSON.stringify({ + query, + index, + }), + signal, + }); +}; diff --git a/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_overview_link.tsx b/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_overview_link.tsx new file mode 100644 index 0000000000000..e9891fc066ec2 --- /dev/null +++ b/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_overview_link.tsx @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React from 'react'; +import styled from 'styled-components'; +import { EuiLink, EuiText } from '@elastic/eui'; + +import { useKibana } from '../../../../common/lib/kibana'; +import { EQL_OVERVIEW_LINK_TEXT } from './translations'; + +const InlineText = styled(EuiText)` + display: inline-block; +`; + +export const EqlOverviewLink = () => { + const overviewUrl = useKibana().services.docLinks.links.query.eql; + + return ( + + {EQL_OVERVIEW_LINK_TEXT} + + ); +}; diff --git a/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.test.tsx b/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.test.tsx index 331c0ba4c4491..5539e5eb2c294 100644 --- a/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.test.tsx +++ b/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.test.tsx @@ -7,9 +7,12 @@ import React from 'react'; import { shallow, mount } from 'enzyme'; -import { useFormFieldMock } from '../../../../common/mock'; +import { TestProviders, useFormFieldMock } from '../../../../common/mock'; import { mockQueryBar } from '../../../pages/detection_engine/rules/all/__mocks__/mock'; import { EqlQueryBar, EqlQueryBarProps } from './eql_query_bar'; +import { getEqlValidationError } from './validators.mock'; + +jest.mock('../../../../common/lib/kibana'); describe('EqlQueryBar', () => { let mockField: EqlQueryBarProps['field']; @@ -27,7 +30,11 @@ describe('EqlQueryBar', () => { }); it('sets the field value on input change', () => { - const wrapper = mount(); + const wrapper = mount( + + + + ); wrapper .find('[data-test-subj="eqlQueryBarTextInput"]') @@ -44,4 +51,30 @@ describe('EqlQueryBar', () => { expect(mockField.setValue).toHaveBeenCalledWith(expected); }); + + it('does not render errors for a valid query', () => { + const wrapper = mount( + + + + ); + + expect(wrapper.find('[data-test-subj="eql-validation-errors-popover"]').exists()).toEqual( + false + ); + }); + + it('renders errors for an invalid query', () 
=> { + const invalidMockField = useFormFieldMock({ + value: mockQueryBar, + errors: [getEqlValidationError()], + }); + const wrapper = mount( + + + + ); + + expect(wrapper.find('[data-test-subj="eql-validation-errors-popover"]').exists()).toEqual(true); + }); }); diff --git a/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.tsx b/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.tsx index e3f33ea9b9b87..f7ee5be18154c 100644 --- a/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.tsx +++ b/x-pack/plugins/security_solution/public/detections/components/rules/eql_query_bar/eql_query_bar.tsx @@ -4,11 +4,24 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { FC, useCallback, ChangeEvent } from 'react'; +import React, { FC, useCallback, ChangeEvent, useEffect, useState } from 'react'; +import styled from 'styled-components'; import { EuiFormRow, EuiTextArea } from '@elastic/eui'; -import { FieldHook, getFieldValidityAndErrorMessage } from '../../../../shared_imports'; +import { FieldHook } from '../../../../shared_imports'; +import { useAppToasts } from '../../../../common/hooks/use_app_toasts'; import { DefineStepRule } from '../../../pages/detection_engine/rules/types'; +import * as i18n from './translations'; +import { EqlQueryBarFooter } from './footer'; +import { getValidationResults } from './validators'; + +const TextArea = styled(EuiTextArea)` + display: block; + border: ${({ theme }) => theme.eui.euiBorderThin}; + border-bottom: 0; + box-shadow: none; + min-height: ${({ theme }) => theme.eui.euiFormControlHeight}; +`; export interface EqlQueryBarProps { dataTestSubj: string; @@ -17,14 +30,27 @@ export interface EqlQueryBarProps { } export const EqlQueryBar: FC = ({ dataTestSubj, field, idAria }) => { + const { addError } = useAppToasts(); + const [errorMessages, setErrorMessages] = useState([]); const { setValue } = field; - const { isInvalid, errorMessage } = getFieldValidityAndErrorMessage(field); + const { isValid, message, messages, error } = getValidationResults(field); const fieldValue = field.value.query.query as string; + useEffect(() => { + setErrorMessages(messages ?? []); + }, [messages]); + + useEffect(() => { + if (error) { + addError(error, { title: i18n.EQL_VALIDATION_REQUEST_ERROR }); + } + }, [error, addError]); + const handleChange = useCallback( (e: ChangeEvent) => { const newQuery = e.target.value; + setErrorMessages([]); setValue({ filters: [], query: { @@ -41,19 +67,22 @@ export const EqlQueryBar: FC = ({ dataTestSubj, field, idAria label={field.label} labelAppend={field.labelAppend} helpText={field.helpText} - error={errorMessage} - isInvalid={isInvalid} + error={message} + isInvalid={!isValid} fullWidth data-test-subj={dataTestSubj} describedByIds={idAria ? [idAria] : undefined} > - + <> +
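
A note on the first AddExceptionModal hunk in this section: the alert's host.os.family is now run through the osTypeArray codec before being trusted. A minimal standalone sketch of that guard, assuming osTypeArray and OsTypeArray are the io-ts codec and type exported by the lists schemas module used elsewhere in this patch; getAlertOsTypes and its defaults parameter are illustrative stand-ins (the modal's osDefaults value is outside this excerpt):

import { osTypeArray, OsTypeArray } from '../../../../../lists/common/schemas';

// Narrow the raw host.os.family value coming off the alert document to OsTypeArray;
// fall back to the provided defaults when it is missing or not a recognised OS list.
export const getAlertOsTypes = (candidate: unknown, defaults: OsTypeArray): OsTypeArray => {
  if (osTypeArray.is(candidate) && candidate.length > 0) {
    return candidate;
  }
  return defaults;
};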
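
The bulk of the exceptions changes replace the `os:<value>` entries that used to live in `_tags` with a first-class os_types array. Condensed from the helpers.tsx hunks above into a self-contained sketch; the ExceptionItem alias is introduced here for brevity, and import paths follow the ones visible in the hunks. Behaviour matches the updated tests, e.g. formatOperatingSystems(['macos', 'windows']) renders 'macOS, Windows':

import { capitalize } from 'lodash';
import {
  ExceptionListItemSchema,
  CreateExceptionListItemSchema,
  OsTypeArray,
} from '../../../shared_imports';

type ExceptionItem = ExceptionListItemSchema | CreateExceptionListItemSchema;

// OS values are stored directly on os_types instead of being encoded as `os:` tags.
export const enrichExceptionItemsWithOS = (
  exceptionItems: ExceptionItem[],
  osTypes: OsTypeArray
): ExceptionItem[] => exceptionItems.map((item) => ({ ...item, os_types: osTypes }));

// Only known OS values are rendered; 'macos' keeps its brand casing, the rest are capitalized.
export const formatOperatingSystems = (osTypes: string[]): string =>
  osTypes
    .filter((os) => ['linux', 'macos', 'windows'].includes(os))
    .map((os) => (os === 'macos' ? 'macOS' : capitalize(os)))
    .join(', ');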
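
On the EQL side, the new api.ts wraps the DETECTION_ENGINE_EQL_VALIDATION_URL route. A usage sketch, assuming the caller sits next to api.ts and obtains HttpStart the same way api.ts does; checkEqlQuery, the example query, and the index pattern are illustrative only:

import { HttpStart } from '../../../../../../../src/core/public';
import { validateEql } from './api';

export const checkEqlQuery = async (http: HttpStart) => {
  // AbortController lets the caller cancel the request, e.g. on component unmount.
  const abortCtrl = new AbortController();

  // Resolves with the body described by the response EqlValidationSchema.
  return validateEql({
    http,
    query: 'process where process.name == "cmd.exe"', // illustrative EQL
    index: ['logs-endpoint.events.*'],                 // illustrative index pattern
    signal: abortCtrl.signal,
  });
};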
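
Finally, eql_query_bar.tsx now reads { isValid, message, messages, error } from getValidationResults(field) instead of getFieldValidityAndErrorMessage. The validators module itself is not included in this excerpt, so the following is only a hypothetical reading of that contract, written to make the component's data flow explicit (summary message for the form row, detailed messages for the footer popover, transport error for the toast):

import { FieldHook } from '../../../../shared_imports';

// Hypothetical shape; the real implementation lives in ./validators and is not part of this diff.
export interface EqlValidationResults {
  isValid: boolean;               // drives EuiFormRow isInvalid={!isValid}
  message: string | undefined;    // summary rendered via EuiFormRow error={message}
  messages: string[] | undefined; // detailed errors shown by EqlQueryBarFooter
  error: Error | undefined;       // request failure surfaced through addError(...)
}

export const getValidationResults = (field: FieldHook): EqlValidationResults => {
  const [firstError] = field.errors;
  return {
    isValid: field.errors.length === 0,
    message: typeof firstError?.message === 'string' ? firstError.message : undefined,
    messages: (firstError as { messages?: string[] } | undefined)?.messages,
    error: (firstError as { error?: Error } | undefined)?.error,
  };
};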