diff --git a/.gitignore b/.gitignore
index 716cea937f9c0..dfd02de7b1186 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,6 +31,7 @@ disabledPlugins
webpackstats.json
/config/*
!/config/kibana.yml
+!/config/node.options
coverage
selenium
.babel_register_cache.json
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a0aeed7a34949..11c595a1ad983 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,739 +1,5 @@
# Contributing to Kibana
-We understand that you may not have days at a time to work on Kibana. We ask that you read our contributing guidelines carefully so that you spend less time, overall, struggling to push your PR through our code review processes.
+We understand that you may not have days at a time to work on Kibana. We ask that you read our [developer guide](https://www.elastic.co/guide/en/kibana/master/development.html) carefully so that you spend less time, overall, struggling to push your PR through our code review processes.
-At the same time, reading the contributing guidelines will give you a better idea of how to post meaningful issues that will be more easily be parsed, considered, and resolved. A big win for everyone involved! :tada:
-
-## Table of Contents
-
-A high level overview of our contributing guidelines.
-
-- [Effective issue reporting in Kibana](#effective-issue-reporting-in-kibana)
- - [Voicing the importance of an issue](#voicing-the-importance-of-an-issue)
- - ["My issue isn't getting enough attention"](#my-issue-isnt-getting-enough-attention)
- - ["I want to help!"](#i-want-to-help)
-- [How We Use Git and GitHub](#how-we-use-git-and-github)
- - [Forking](#forking)
- - [Branching](#branching)
- - [Commits and Merging](#commits-and-merging)
- - [Rebasing and fixing merge conflicts](#rebasing-and-fixing-merge-conflicts)
- - [What Goes Into a Pull Request](#what-goes-into-a-pull-request)
-- [Contributing Code](#contributing-code)
- - [Setting Up Your Development Environment](#setting-up-your-development-environment)
- - [Increase node.js heap size](#increase-nodejs-heap-size)
- - [Running Elasticsearch Locally](#running-elasticsearch-locally)
- - [Nightly snapshot (recommended)](#nightly-snapshot-recommended)
- - [Keeping data between snapshots](#keeping-data-between-snapshots)
- - [Source](#source)
- - [Archive](#archive)
- - [Sample Data](#sample-data)
- - [Running Elasticsearch Remotely](#running-elasticsearch-remotely)
- - [Running remote clusters](#running-remote-clusters)
- - [Running Kibana](#running-kibana)
- - [Running Kibana in Open-Source mode](#running-kibana-in-open-source-mode)
- - [Unsupported URL Type](#unsupported-url-type)
- - [Customizing `config/kibana.dev.yml`](#customizing-configkibanadevyml)
- - [Potential Optimization Pitfalls](#potential-optimization-pitfalls)
- - [Setting Up SSL](#setting-up-ssl)
- - [Linting](#linting)
- - [Setup Guide for VS Code Users](#setup-guide-for-vs-code-users)
- - [Internationalization](#internationalization)
- - [Localization](#localization)
- - [Styling with SASS](#styling-with-sass)
- - [Testing and Building](#testing-and-building)
- - [Debugging server code](#debugging-server-code)
- - [Instrumenting with Elastic APM](#instrumenting-with-elastic-apm)
- - [Unit testing frameworks](#unit-testing-frameworks)
- - [Running specific Kibana tests](#running-specific-kibana-tests)
- - [Debugging Unit Tests](#debugging-unit-tests)
- - [Unit Testing Plugins](#unit-testing-plugins)
- - [Automated Accessibility Testing](#automated-accessibility-testing)
- - [Cross-browser compatibility](#cross-browser-compatibility)
- - [Testing compatibility locally](#testing-compatibility-locally)
- - [Running Browser Automation Tests](#running-browser-automation-tests)
- - [Building OS packages](#building-os-packages)
- - [Writing documentation](#writing-documentation)
- - [Release Notes Process](#release-notes-process)
-- [Signing the contributor license agreement](#signing-the-contributor-license-agreement)
-- [Submitting a Pull Request](#submitting-a-pull-request)
-- [Code Reviewing](#code-reviewing)
- - [Getting to the Code Review Stage](#getting-to-the-code-review-stage)
- - [Reviewing Pull Requests](#reviewing-pull-requests)
-
-Don't fret, it's not as daunting as the table of contents makes it out to be!
-
-## Effective issue reporting in Kibana
-
-### Voicing the importance of an issue
-
-We seriously appreciate thoughtful comments. If an issue is important to you, add a comment with a solid write up of your use case and explain why it's so important. Please avoid posting comments comprised solely of a thumbs up emoji 👍.
-
-Granted that you share your thoughts, we might even be able to come up with creative solutions to your specific problem. If everything you'd like to say has already been brought up but you'd still like to add a token of support, feel free to add a [👍 thumbs up reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments) on the issue itself and on the comment which best summarizes your thoughts.
-
-### "My issue isn't getting enough attention"
-
-First of all, **sorry about that!** We want you to have a great time with Kibana.
-
-There's hundreds of open issues and prioritizing what to work on is an important aspect of our daily jobs. We prioritize issues according to impact and difficulty, so some issues can be neglected while we work on more pressing issues.
-
-Feel free to bump your issues if you think they've been neglected for a prolonged period.
-
-### "I want to help!"
-
-**Now we're talking**. If you have a bug fix or new feature that you would like to contribute to Kibana, please **find or open an issue about it before you start working on it.** Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change.
-
-We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code.
-
-## How We Use Git and GitHub
-
-### Forking
-
-We follow the [GitHub forking model](https://help.github.com/articles/fork-a-repo/) for collaborating
-on Kibana code. This model assumes that you have a remote called `upstream` which points to the
-official Kibana repo, which we'll refer to in later code snippets.
-
-### Branching
-
-* All work on the next major release goes into master.
-* Past major release branches are named `{majorVersion}.x`. They contain work that will go into the next minor release. For example, if the next minor release is `5.2.0`, work for it should go into the `5.x` branch.
-* Past minor release branches are named `{majorVersion}.{minorVersion}`. They contain work that will go into the next patch release. For example, if the next patch release is `5.3.1`, work for it should go into the `5.3` branch.
-* All work is done on feature branches and merged into one of these branches.
-* Where appropriate, we'll backport changes into older release branches.
-
-### Commits and Merging
-
-* Feel free to make as many commits as you want, while working on a branch.
-* When submitting a PR for review, please perform an interactive rebase to present a logical history that's easy for the reviewers to follow.
-* Please use your commit messages to include helpful information on your changes, e.g. changes to APIs, UX changes, bugs fixed, and an explanation of *why* you made the changes that you did.
-* Resolve merge conflicts by rebasing the target branch over your feature branch, and force-pushing (see below for instructions).
-* When merging, we'll squash your commits into a single commit.
-
-#### Rebasing and fixing merge conflicts
-
-Rebasing can be tricky, and fixing merge conflicts can be even trickier because it involves force pushing. This is all compounded by the fact that attempting to push a rebased branch remotely will be rejected by git, and you'll be prompted to do a `pull`, which is not at all what you should do (this will really mess up your branch's history).
-
-Here's how you should rebase master onto your branch, and how to fix merge conflicts when they arise.
-
-First, make sure master is up-to-date.
-
-```
-git checkout master
-git fetch upstream
-git rebase upstream/master
-```
-
-Then, check out your branch and rebase master on top of it, which will apply all of the new commits on master to your branch, and then apply all of your branch's new commits after that.
-
-```
-git checkout name-of-your-branch
-git rebase master
-```
-
-You want to make sure there are no merge conflicts. If there are merge conflicts, git will pause the rebase and allow you to fix the conflicts before continuing.
-
-You can use `git status` to see which files contain conflicts. They'll be the ones that aren't staged for commit. Open those files, and look for where git has marked the conflicts. Resolve the conflicts so that the changes you want to make to the code have been incorporated in a way that doesn't destroy work that's been done in master. Refer to master's commit history on GitHub if you need to gain a better understanding of how code is conflicting and how best to resolve it.
-
-Once you've resolved all of the merge conflicts, use `git add -A` to stage them to be committed, and then use `git rebase --continue` to tell git to continue the rebase.
-
-When the rebase has completed, you will need to force push your branch because the history is now completely different than what's on the remote. **This is potentially dangerous** because it will completely overwrite what you have on the remote, so you need to be sure that you haven't lost any work when resolving merge conflicts. (If there weren't any merge conflicts, then you can force push without having to worry about this.)
-
-```
-git push origin name-of-your-branch --force
-```
-
-This will overwrite the remote branch with what you have locally. You're done!
-
-**Note that you should not run `git pull`**, for example in response to a push rejection like this:
-
-```
-! [rejected] name-of-your-branch -> name-of-your-branch (non-fast-forward)
-error: failed to push some refs to 'https://github.com/YourGitHubHandle/kibana.git'
-hint: Updates were rejected because the tip of your current branch is behind
-hint: its remote counterpart. Integrate the remote changes (e.g.
-hint: 'git pull ...') before pushing again.
-hint: See the 'Note about fast-forwards' in 'git push --help' for details.
-```
-
-Assuming you've successfully rebased and you're happy with the code, you should force push instead.
-
-### What Goes Into a Pull Request
-
-* Please include an explanation of your changes in your PR description.
-* Links to relevant issues, external resources, or related PRs are very important and useful.
-* Please update any tests that pertain to your code, and add new tests where appropriate.
-* See [Submitting a Pull Request](#submitting-a-pull-request) for more info.
-
-## Contributing Code
-
-These guidelines will help you get your Pull Request into shape so that a code review can start as soon as possible.
-
-### Setting Up Your Development Environment
-
-Fork, then clone the `kibana` repo and change directory into it
-
-```bash
-git clone https://github.com/[YOUR_USERNAME]/kibana.git kibana
-cd kibana
-```
-
-Install the version of Node.js listed in the `.node-version` file. This can be automated with tools such as [nvm](https://github.com/creationix/nvm), [nvm-windows](https://github.com/coreybutler/nvm-windows) or [avn](https://github.com/wbyoung/avn). As we also include a `.nvmrc` file you can switch to the correct version when using nvm by running:
-
-```bash
-nvm use
-```
-
-Install the latest version of [yarn](https://yarnpkg.com).
-
-Bootstrap Kibana and install all the dependencies
-
-```bash
-yarn kbn bootstrap
-```
-
-> Node.js native modules could be in use and node-gyp is the tool used to build them. There are tools you need to install per platform and python versions you need to be using. Please see https://github.com/nodejs/node-gyp#installation and follow the guide according your platform.
-
-(You can also run `yarn kbn` to see the other available commands. For more info about this tool, see https://github.com/elastic/kibana/tree/master/packages/kbn-pm.)
-
-When switching branches which use different versions of npm packages you may need to run;
-```bash
-yarn kbn clean
-```
-
-If you have failures during `yarn kbn bootstrap` you may have some corrupted packages in your yarn cache which you can clean with;
-```bash
-yarn cache clean
-```
-
-#### Increase node.js heap size
-
-Kibana is a big project and for some commands it can happen that the process hits the default heap limit and crashes with an out-of-memory error. If you run into this problem, you can increase maximum heap size by setting the `--max_old_space_size` option on the command line. To set the limit for all commands, simply add the following line to your shell config: `export NODE_OPTIONS="--max_old_space_size=2048"`.
-
-### Running Elasticsearch Locally
-
-There are a few options when it comes to running Elasticsearch locally:
-
-#### Nightly snapshot (recommended)
-
-These snapshots are built on a nightly basis which expire after a couple weeks. If running from an old, untracted branch this snapshot might not exist. In which case you might need to run from source or an archive.
-
-```bash
-yarn es snapshot
-```
-
-##### Keeping data between snapshots
-
-If you want to keep the data inside your Elasticsearch between usages of this command,
-you should use the following command, to keep your data folder outside the downloaded snapshot
-folder:
-
-```bash
-yarn es snapshot -E path.data=../data
-```
-
-The same parameter can be used with the source and archive command shown in the following
-paragraphs.
-
-#### Source
-
-By default, it will reference an [elasticsearch](https://github.com/elastic/elasticsearch) checkout which is a sibling to the Kibana directory named `elasticsearch`. If you wish to use a checkout in another location you can provide that by supplying `--source-path`
-
-```bash
-yarn es source
-```
-
-#### Archive
-
-Use this if you already have a distributable. For released versions, one can be obtained on the [Elasticsearch downloads](https://www.elastic.co/downloads/elasticsearch) page.
-
-```bash
-yarn es archive
-```
-
-**Each of these will run Elasticsearch with a `basic` license. Additional options are available, pass `--help` for more information.**
-
-##### Sample Data
-
-If you're just getting started with Elasticsearch, you could use the following command to populate your instance with a few fake logs to hit the ground running.
-
-```bash
-node scripts/makelogs --auth :
-```
-> The default username and password combination are `elastic:changeme`
-
-> Make sure to execute `node scripts/makelogs` *after* elasticsearch is up and running!
-
-### Running Elasticsearch Remotely
-
-You can save some system resources, and the effort of generating sample data, if you have a remote Elasticsearch cluster to connect to. (**Elasticians: you do! Check with your team about where to find credentials**)
-
-You'll need to [create a `kibana.dev.yml`](#customizing-configkibanadevyml) and add the following to it:
-
-```
-elasticsearch.hosts:
- - {{ url }}
-elasticsearch.username: {{ username }}
-elasticsearch.password: {{ password }}
-elasticsearch.ssl.verificationMode: none
-```
-
-If many other users will be interacting with your remote cluster, you'll want to add the following to avoid causing conflicts:
-
-```
-kibana.index: '.{YourGitHubHandle}-kibana'
-xpack.task_manager.index: '.{YourGitHubHandle}-task-manager-kibana'
-```
-
-### Running remote clusters
-Setup remote clusters for cross cluster search (CCS) and cross cluster replication (CCR).
-
-Start your primary cluster by running:
-```bash
-yarn es snapshot -E path.data=../data_prod1
-```
-
-Start your remote cluster by running:
-```bash
-yarn es snapshot -E transport.port=9500 -E http.port=9201 -E path.data=../data_prod2
-```
-
-Once both clusters are running, start kibana. Kibana will connect to the primary cluster.
-
-Setup the remote cluster in Kibana from either `Management` -> `Elasticsearch` -> `Remote Clusters` UI or by running the following script in `Console`.
-```
-PUT _cluster/settings
-{
- "persistent": {
- "cluster": {
- "remote": {
- "cluster_one": {
- "seeds": [
- "localhost:9500"
- ]
- }
- }
- }
- }
-}
-```
-
-Follow the [cross-cluster search](https://www.elastic.co/guide/en/kibana/current/management-cross-cluster-search.html) instructions for setting up index patterns to search across clusters.
-
-### Running Kibana
-
-Change to your local Kibana directory.
-Start the development server.
-
-```bash
-yarn start
-```
-
-> On Windows, you'll need to use Git Bash, Cygwin, or a similar shell that exposes the `sh` command. And to successfully build you'll need Cygwin optional packages zip, tar, and shasum.
-
-Now you can point your web browser to http://localhost:5601 and start using Kibana! When running `yarn start`, Kibana will also log that it is listening on port 5603 due to the base path proxy, but you should still access Kibana on port 5601.
-
-By default, you can log in with username `elastic` and password `changeme`. See the `--help` options on `yarn es ` if you'd like to configure a different password.
-
-#### Running Kibana in Open-Source mode
-
-If you're looking to only work with the open-source software, supply the license type to `yarn es`:
-
-```bash
-yarn es snapshot --license oss
-```
-
-And start Kibana with only open-source code:
-
-```bash
-yarn start --oss
-```
-
-#### Unsupported URL Type
-
-If you're installing dependencies and seeing an error that looks something like
-
-```
-Unsupported URL Type: link:packages/eslint-config-kibana
-```
-
-you're likely running `npm`. To install dependencies in Kibana you need to run `yarn kbn bootstrap`. For more info, see [Setting Up Your Development Environment](#setting-up-your-development-environment) above.
-
-#### Customizing `config/kibana.dev.yml`
-
-The `config/kibana.yml` file stores user configuration directives. Since this file is checked into source control, however, developer preferences can't be saved without the risk of accidentally committing the modified version. To make customizing configuration easier during development, the Kibana CLI will look for a `config/kibana.dev.yml` file if run with the `--dev` flag. This file behaves just like the non-dev version and accepts any of the [standard settings](https://www.elastic.co/guide/en/kibana/current/settings.html).
-
-#### Potential Optimization Pitfalls
-
- - Webpack is trying to include a file in the bundle that I deleted and is now complaining about it is missing
- - A module id that used to resolve to a single file now resolves to a directory, but webpack isn't adapting
- - (if you discover other scenarios, please send a PR!)
-
-#### Setting Up SSL
-
-Kibana includes self-signed certificates that can be used for development purposes in the browser and for communicating with Elasticsearch: `yarn start --ssl` & `yarn es snapshot --ssl`.
-
-### Linting
-
-A note about linting: We use [eslint](http://eslint.org) to check that the [styleguide](STYLEGUIDE.md) is being followed. It runs in a pre-commit hook and as a part of the tests, but most contributors integrate it with their code editors for real-time feedback.
-
-Here are some hints for getting eslint setup in your favorite editor:
-
-Editor | Plugin
------------|-------------------------------------------------------------------------------
-Sublime | [SublimeLinter-eslint](https://github.com/roadhump/SublimeLinter-eslint#installation)
-Atom | [linter-eslint](https://github.com/AtomLinter/linter-eslint#installation)
-VSCode | [ESLint](https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint)
-IntelliJ | Settings » Languages & Frameworks » JavaScript » Code Quality Tools » ESLint
-`vi` | [scrooloose/syntastic](https://github.com/scrooloose/syntastic)
-
-Another tool we use for enforcing consistent coding style is EditorConfig, which can be set up by installing a plugin in your editor that dynamically updates its configuration. Take a look at the [EditorConfig](http://editorconfig.org/#download) site to find a plugin for your editor, and browse our [`.editorconfig`](https://github.com/elastic/kibana/blob/master/.editorconfig) file to see what config rules we set up.
-
-#### Setup Guide for VS Code Users
-
-Note that for VSCode, to enable "live" linting of TypeScript (and other) file types, you will need to modify your local settings, as shown below. The default for the ESLint extension is to only lint JavaScript file types.
-
-```json
-"eslint.validate": [
- "javascript",
- "javascriptreact",
- { "language": "typescript", "autoFix": true },
- { "language": "typescriptreact", "autoFix": true }
-]
-```
-
-`eslint` can automatically fix trivial lint errors when you save a file by adding this line in your setting.
-
-```json
- "eslint.autoFixOnSave": true,
-```
-
-:warning: It is **not** recommended to use the [`Prettier` extension/IDE plugin](https://prettier.io/) while maintaining the Kibana project. Formatting and styling roles are set in the multiple `.eslintrc.js` files across the project and some of them use the [NPM version of Prettier](https://www.npmjs.com/package/prettier). Using the IDE extension might cause conflicts, applying the formatting to too many files that shouldn't be prettier-ized and/or highlighting errors that are actually OK.
-
-### Internationalization
-
-All user-facing labels and info texts in Kibana should be internationalized. Please take a look at the [readme](packages/kbn-i18n/README.md) and the [guideline](packages/kbn-i18n/GUIDELINE.md) of the i18n package on how to do so.
-
-In order to enable translations in the React parts of the application, the top most component of every `ReactDOM.render` call should be the `Context` component from the `i18n` core service:
-```jsx
-const I18nContext = coreStart.i18n.Context;
-
-ReactDOM.render(
-
- {myComponentTree}
- ,
- container
-);
-```
-
-There are a number of tools created to support internationalization in Kibana that would allow one to validate internationalized labels,
-extract them to a `JSON` file or integrate translations back to Kibana. To know more, please read corresponding [readme](src/dev/i18n/README.md) file.
-
-### Localization
-
-We cannot support accepting contributions to the translations from any source other than the translators we have engaged to do the work.
-We are still to develop a proper process to accept any contributed translations. We certainly appreciate that people care enough about the localization effort to want to help improve the quality. We aim to build out a more comprehensive localization process for the future and will notify you once contributions can be supported, but for the time being, we are not able to incorporate suggestions.
-
-### Styling with SASS
-
-When writing a new component, create a sibling SASS file of the same name and import directly into the JS/TS component file. Doing so ensures the styles are never separated or lost on import and allows for better modularization (smaller individual plugin asset footprint).
-
-All SASS (.scss) files will automatically build with the [EUI](https://elastic.github.io/eui/#/guidelines/sass) & Kibana invisibles (SASS variables, mixins, functions) from the [`globals_[theme].scss` file](src/legacy/ui/public/styles/_globals_v7light.scss).
-
-**Example:**
-
-```tsx
-// component.tsx
-
-import './component.scss';
-
-export const Component = () => {
- return (
-
- );
-}
-```
-
-```scss
-// component.scss
-
-.plgComponent { ... }
-```
-
-Do not use the underscore `_` SASS file naming pattern when importing directly into a javascript file.
-
-### Testing and Building
-
-To ensure that your changes will not break other functionality, please run the test suite and build process before submitting your Pull Request.
-
-Before running the tests you will need to install the projects dependencies as described above.
-
-Once that's done, just run:
-
-```bash
-yarn test && yarn build --skip-os-packages
-```
-
-You can get all build options using the following command:
-
-```bash
-yarn build --help
-```
-
-macOS users on a machine with a discrete graphics card may see significant speedups (up to 2x) when running tests by changing your terminal emulator's GPU settings. In iTerm2:
-- Open Preferences (Command + ,)
-- In the General tab, under the "Magic" section, ensure "GPU rendering" is checked
-- Open "Advanced GPU Settings..."
-- Uncheck the "Prefer integrated to discrete GPU" option
-- Restart iTerm
-
-#### Debugging Server Code
-`yarn debug` will start the server with Node's inspect flag. Kibana's development mode will start three processes on ports `9229`, `9230`, and `9231`. Chrome's developer tools need to be configured to connect to all three connections. Add `localhost:` for each Kibana process in Chrome's developer tools connection tab.
-
-#### Instrumenting with Elastic APM
-Kibana ships with the [Elastic APM Node.js Agent](https://github.com/elastic/apm-agent-nodejs) built-in for debugging purposes.
-
-Its default configuration is meant to be used by core Kibana developers only, but it can easily be re-configured to your needs.
-In its default configuration it's disabled and will, once enabled, send APM data to a centrally managed Elasticsearch cluster accessible only to Elastic employees.
-
-To change the location where data is sent, use the [`serverUrl`](https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#server-url) APM config option.
-To activate the APM agent, use the [`active`](https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#active) APM config option.
-
-All config options can be set either via environment variables, or by creating an appropriate config file under `config/apm.dev.js`.
-For more information about configuring the APM agent, please refer to [the documentation](https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuring-the-agent.html).
-
-Example `config/apm.dev.js` file:
-
-```js
-module.exports = {
- active: true,
-};
-```
-
-APM [Real User Monitoring agent](https://www.elastic.co/guide/en/apm/agent/rum-js/current/index.html) is not available in the Kibana distributables,
-however the agent can be enabled by setting `ELASTIC_APM_ACTIVE` to `true`.
-flags
-```
-ELASTIC_APM_ACTIVE=true yarn start
-// activates both Node.js and RUM agent
-```
-
-Once the agent is active, it will trace all incoming HTTP requests to Kibana, monitor for errors, and collect process-level metrics.
-The collected data will be sent to the APM Server and is viewable in the APM UI in Kibana.
-
-#### Unit testing frameworks
-Kibana is migrating unit testing from Mocha to Jest. Legacy unit tests still
-exist in Mocha but all new unit tests should be written in Jest. Mocha tests
-are contained in `__tests__` directories. Whereas Jest tests are stored in
-the same directory as source code files with the `.test.js` suffix.
-
-#### Running specific Kibana tests
-
-The following table outlines possible test file locations and how to invoke them:
-
-| Test runner | Test location | Runner command (working directory is kibana root) |
-| ----------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- |
-| Jest | `src/**/*.test.js`
`src/**/*.test.ts` | `yarn test:jest -t regexp [test path]` |
-| Jest (integration) | `**/integration_tests/**/*.test.js` | `yarn test:jest_integration -t regexp [test path]` |
-| Mocha | `src/**/__tests__/**/*.js`
`!src/**/public/__tests__/*.js`
`packages/kbn-datemath/test/**/*.js`
`packages/kbn-dev-utils/src/**/__tests__/**/*.js`
`tasks/**/__tests__/**/*.js` | `node scripts/mocha --grep=regexp [test path]` |
-| Functional | `test/*integration/**/config.js`
`test/*functional/**/config.js`
`test/accessibility/config.js` | `yarn test:ftr:server --config test/[directory]/config.js`
`yarn test:ftr:runner --config test/[directory]/config.js --grep=regexp` |
-| Karma | `src/**/public/__tests__/*.js` | `yarn test:karma:debug` |
-
-For X-Pack tests located in `x-pack/` see [X-Pack Testing](x-pack/README.md#testing)
-
-Test runner arguments:
- - Where applicable, the optional arguments `-t=regexp` or `--grep=regexp` will only run tests or test suites whose descriptions matches the regular expression.
- - `[test path]` is the relative path to the test file.
-
- Examples:
- - Run the entire elasticsearch_service test suite:
- ```
- yarn test:jest src/core/server/elasticsearch/elasticsearch_service.test.ts
- ```
- - Run the jest test case whose description matches `stops both admin and data clients`:
- ```
- yarn test:jest -t 'stops both admin and data clients' src/core/server/elasticsearch/elasticsearch_service.test.ts
- ```
- - Run the api integration test case whose description matches the given string:
- ```
- yarn test:ftr:server --config test/api_integration/config.js
- yarn test:ftr:runner --config test/api_integration/config.js --grep='should return 404 if id does not match any sample data sets'
- ```
-
-#### Debugging Unit Tests
-
-The standard `yarn test` task runs several sub tasks and can take several minutes to complete, making debugging failures pretty painful. In order to ease the pain specialized tasks provide alternate methods for running the tests.
-
-You could also add the `--debug` option so that `node` is run using the `--debug-brk` flag. You'll need to connect a remote debugger such as [`node-inspector`](https://github.com/node-inspector/node-inspector) to proceed in this mode.
-
-```bash
-node scripts/mocha --debug
-```
-
-With `yarn test:karma`, you can run only the browser tests. Coverage reports are available for browser tests by running `yarn test:coverage`. You can find the results under the `coverage/` directory that will be created upon completion.
-
-```bash
-yarn test:karma
-```
-
-Using `yarn test:karma:debug` initializes an environment for debugging the browser tests. Includes an dedicated instance of the kibana server for building the test bundle, and a karma server. When running this task the build is optimized for the first time and then a karma-owned instance of the browser is opened. Click the "debug" button to open a new tab that executes the unit tests.
-
-```bash
-yarn test:karma:debug
-```
-
-In the screenshot below, you'll notice the URL is `localhost:9876/debug.html`. You can append a `grep` query parameter to this URL and set it to a string value which will be used to exclude tests which don't match. For example, if you changed the URL to `localhost:9876/debug.html?query=my test` and then refreshed the browser, you'd only see tests run which contain "my test" in the test description.
-
-
-![Browser test debugging](http://i.imgur.com/DwHxgfq.png)
-
-#### Unit Testing Plugins
-
-This should work super if you're using the [Kibana plugin generator](https://github.com/elastic/kibana/tree/master/packages/kbn-plugin-generator). If you're not using the generator, well, you're on your own. We suggest you look at how the generator works.
-
-To run the tests for just your particular plugin run the following command from your plugin:
-
-```bash
-yarn test:mocha
-yarn test:karma:debug # remove the debug flag to run them once and close
-```
-
-#### Automated Accessibility Testing
-
-To run the tests locally:
-
-1. In one terminal window run `node scripts/functional_tests_server --config test/accessibility/config.ts`
-2. In another terminal window run `node scripts/functional_test_runner.js --config test/accessibility/config.ts`
-
-To run the x-pack tests, swap the config file out for `x-pack/test/accessibility/config.ts`.
-
-After the server is up, you can go to this instance of Kibana at `localhost:5620`.
-
-The testing is done using [axe](https://github.com/dequelabs/axe-core). The same thing that runs in CI,
-can be run locally using their browser plugins:
-
-- [Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US)
-- [Firefox](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/)
-
-#### Cross-browser Compatibility
-
-##### Testing Compatibility Locally
-
-###### Testing IE on OS X
-
-* [Download VMWare Fusion](http://www.vmware.com/products/fusion/fusion-evaluation.html).
-* [Download IE virtual machines](https://developer.microsoft.com/en-us/microsoft-edge/tools/vms/#downloads) for VMWare.
-* Open VMWare and go to Window > Virtual Machine Library. Unzip the virtual machine and drag the .vmx file into your Virtual Machine Library.
-* Right-click on the virtual machine you just added to your library and select "Snapshots...", and then click the "Take" button in the modal that opens. You can roll back to this snapshot when the VM expires in 90 days.
-* In System Preferences > Sharing, change your computer name to be something simple, e.g. "computer".
-* Run Kibana with `yarn start --host=computer.local` (substituting your computer name).
-* Now you can run your VM, open the browser, and navigate to `http://computer.local:5601` to test Kibana.
-* Alternatively you can use browserstack
-
-##### Running Browser Automation Tests
-
-[Read about the `FunctionalTestRunner`](https://www.elastic.co/guide/en/kibana/current/development-functional-tests.html) to learn more about how you can run and develop functional tests for Kibana core and plugins.
-
-You can also look into the [Scripts README.md](./scripts/README.md) to learn more about using the node scripts we provide for building Kibana, running integration tests, and starting up Kibana and Elasticsearch while you develop.
-
-### Building OS packages
-
-Packages are built using fpm, dpkg, and rpm. Package building has only been tested on Linux and is not supported on any other platform.
-
-```bash
-apt-get install ruby-dev rpm
-gem install fpm -v 1.5.0
-yarn build --skip-archives
-```
-
-To specify a package to build you can add `rpm` or `deb` as an argument.
-
-```bash
-yarn build --rpm
-```
-
-Distributable packages can be found in `target/` after the build completes.
-
-### Writing documentation
-
-Kibana documentation is written in [asciidoc](http://asciidoc.org/) format in
-the `docs/` directory.
-
-To build the docs, clone the [elastic/docs](https://github.com/elastic/docs)
-repo as a sibling of your Kibana repo. Follow the instructions in that project's
-README for getting the docs tooling set up.
-
-**To build the Kibana docs and open them in your browser:**
-
-```bash
-./docs/build_docs --doc kibana/docs/index.asciidoc --chunk 1 --open
-```
-or
-
-```bash
-node scripts/docs.js --open
-```
-
-### Release Notes process
-
-Part of this process only applies to maintainers, since it requires access to GitHub labels.
-
-Kibana publishes [Release Notes](https://www.elastic.co/guide/en/kibana/current/release-notes.html) for major and minor releases. The Release Notes summarize what the PRs accomplish in language that is meaningful to users. To generate the Release Notes, the team runs a script against this repo to collect the merged PRs against the release.
-
-#### Create the Release Notes text
-The text that appears in the Release Notes is pulled directly from your PR title, or a single paragraph of text that you specify in the PR description.
-
-To use a single paragraph of text, enter `Release note:` or a `## Release note` header in the PR description, followed by your text. For example, refer to this [PR](https://github.com/elastic/kibana/pull/65796) that uses the `## Release note` header.
-
-When you create the Release Notes text, use the following best practices:
-* Use present tense.
-* Use sentence case.
-* When you create a feature PR, start with `Adds`.
-* When you create an enhancement PR, start with `Improves`.
-* When you create a bug fix PR, start with `Fixes`.
-* When you create a deprecation PR, start with `Deprecates`.
-
-#### Add your labels
-1. Label the PR with the targeted version (ex: `v7.3.0`).
-2. Label the PR with the appropriate GitHub labels:
- * For a new feature or functionality, use `release_note:enhancement`.
- * For an external-facing fix, use `release_note:fix`. We do not include docs, build, and test fixes in the Release Notes, or unreleased issues that are only on `master`.
- * For a deprecated feature, use `release_note:deprecation`.
- * For a breaking change, use `release_note:breaking`.
- * To **NOT** include your changes in the Release Notes, use `release_note:skip`.
-
-We also produce a blog post that details more important breaking API changes in every major and minor release. When your PR includes a breaking API change, add the `release_note:dev_docs` label, and add a brief summary of the break at the bottom of the PR using the format below:
-
-```
-# Dev Docs
-
-## Name the feature with the break (ex: Visualize Loader)
-
-Summary of the change. Anything Under `#Dev Docs` is used in the blog.
-```
-
-## Signing the contributor license agreement
-
-Please make sure you have signed the [Contributor License Agreement](http://www.elastic.co/contributor-agreement/). We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once.
-
-## Submitting a Pull Request
-
-Push your local changes to your forked copy of the repository and submit a Pull Request. In the Pull Request, describe what your changes do and mention the number of the issue where discussion has taken place, e.g., “Closes #123″.
-
-Always submit your pull against `master` unless the bug is only present in an older version. If the bug affects both `master` and another branch say so in your pull.
-
-Then sit back and wait. There will probably be discussion about the Pull Request and, if any changes are needed, we'll work with you to get your Pull Request merged into Kibana.
-
-## Code Reviewing
-
-After a pull is submitted, it needs to get to review. If you have commit permission on the Kibana repo you will probably perform these steps while submitting your Pull Request. If not, a member of the Elastic organization will do them for you, though you can help by suggesting a reviewer for your changes if you've interacted with someone while working on the issue.
-
-### Getting to the Code Review Stage
-
-1. Assign the `review` label. This signals to the team that someone needs to give this attention.
-1. Do **not** assign a version label. Someone from Elastic staff will assign a version label, if necessary, when your Pull Request is ready to be merged.
-1. Find someone to review your pull. Don't just pick any yahoo, pick the right person. The right person might be the original reporter of the issue, but it might also be the person most familiar with the code you've changed. If neither of those things apply, or your change is small in scope, try to find someone on the Kibana team without a ton of existing reviews on their plate. As a rule, most pulls will require 2 reviewers, but the first reviewer will pick the 2nd.
-
-### Reviewing Pull Requests
-
-So, you've been assigned a pull to review. Check out our [pull request review guidelines](https://www.elastic.co/guide/en/kibana/master/pr-review.html) for our general philosophy for pull request reviewers.
-
-Thank you so much for reading our guidelines! :tada:
+Our developer guide is written in asciidoc and located under [./docs/developer](./docs/developer) if you want to make edits or access it in raw form.
diff --git a/config/node.options b/config/node.options
new file mode 100644
index 0000000000000..2927d1b576716
--- /dev/null
+++ b/config/node.options
@@ -0,0 +1,6 @@
+## Node command line options
+## See `node --help` and `node --v8-options` for available options
+## Please note you should specify one option per line
+
+## max size of old space in megabytes
+#--max-old-space-size=4096
diff --git a/docs/developer/add-data-guide.asciidoc b/docs/developer/add-data-guide.asciidoc
deleted file mode 100644
index e00e46868bb2d..0000000000000
--- a/docs/developer/add-data-guide.asciidoc
+++ /dev/null
@@ -1,38 +0,0 @@
-[[add-data-guide]]
-== Add Data Guide
-
-`Add Data` in the Kibana Home application contains tutorials for setting up data flows in the Elastic stack.
-
-Each tutorial contains three sets of instructions:
-
-* `On Premise.` Set up a data flow when both Kibana and Elasticsearch are running on premise.
-* `On Premise Elastic Cloud.` Set up a data flow when Kibana is running on premise and Elasticsearch is running on Elastic Cloud.
-* `Elastic Cloud.` Set up a data flow when both Kibana and Elasticsearch are running on Elastic Cloud.
-
-[float]
-=== Creating a new tutorial
-1. Create a new directory in the link:https://github.com/elastic/kibana/tree/master/src/plugins/home/server/tutorials[tutorials directory].
-2. In the new directory, create a file called `index.ts` that exports a function.
-The function must return a function object that conforms to the `TutorialSchema` interface link:https://github.com/elastic/kibana/blob/master/src/plugins/home/server/services/tutorials/lib/tutorial_schema.ts[tutorial schema].
-3. Register the tutorial in link:https://github.com/elastic/kibana/blob/master/src/plugins/home/server/tutorials/register.ts[register.ts] by adding it to the `builtInTutorials`.
-// TODO update path once assets are migrated
-4. Add image assets to the link:https://github.com/elastic/kibana/tree/master/src/legacy/core_plugins/kibana/public/home/tutorial_resources[tutorial_resources directory].
-5. Run Kibana locally to preview the tutorial.
-6. Create a PR and go through the review process to get the changes approved.
-
-If you are creating a new plugin and the tutorial is only related to that plugin, you can also place the `TutorialSchema` object into your plugin folder. Add `home` to the `requiredPlugins` list in your `kibana.json` file.
-Then register the tutorial object by calling `home.tutorials.registerTutorial(tutorialObject)` in the `setup` lifecycle of your server plugin.
-
-[float]
-==== Variables
-String values can contain variables that are substituted when rendered. Variables are specified by `{}`.
-For example: `{config.docs.version}` is rendered as `6.2` when running the tutorial in Kibana 6.2.
-
-link:https://github.com/elastic/kibana/blob/master/src/legacy/core_plugins/kibana/public/home/np_ready/components/tutorial/replace_template_strings.js#L23[Provided variables]
-
-[float]
-==== Markdown
-String values can contain limited Markdown syntax.
-
-link:https://github.com/elastic/kibana/blob/master/src/legacy/core_plugins/kibana/public/home/components/tutorial/content.js#L8[Enabled Markdown grammars]
-
diff --git a/docs/developer/advanced/development-basepath.asciidoc b/docs/developer/advanced/development-basepath.asciidoc
new file mode 100644
index 0000000000000..f0b760a21ea0c
--- /dev/null
+++ b/docs/developer/advanced/development-basepath.asciidoc
@@ -0,0 +1,18 @@
+[[development-basepath]]
+=== Considerations for basepath
+
+In dev mode, {kib} by default runs behind a proxy which adds a random path component to its URL.
+
+You can set this explicitly using `server.basePath` in <<settings>>.
+
+Use the `server.rewriteBasePath` setting to tell {kib} if it should remove the basePath from requests it receives, and to prevent a deprecation warning at startup. This setting cannot end in a slash (`/`).
+
+If you want to turn off the basepath when in development mode, start {kib} with the `--no-basepath` flag:
+
+[source,bash]
+----
+yarn start --no-basepath
+----
+
+
+
diff --git a/docs/developer/core/development-es-snapshots.asciidoc b/docs/developer/advanced/development-es-snapshots.asciidoc
similarity index 90%
rename from docs/developer/core/development-es-snapshots.asciidoc
rename to docs/developer/advanced/development-es-snapshots.asciidoc
index 4cd4f31e582db..92fae7a241edf 100644
--- a/docs/developer/core/development-es-snapshots.asciidoc
+++ b/docs/developer/advanced/development-es-snapshots.asciidoc
@@ -1,7 +1,7 @@
[[development-es-snapshots]]
=== Daily Elasticsearch Snapshots
-For local development and CI, Kibana, by default, uses Elasticsearch snapshots that are built daily when running tasks that require Elasticsearch (e.g. functional tests).
+For local development and CI, {kib}, by default, uses Elasticsearch snapshots that are built daily when running tasks that require Elasticsearch (e.g. functional tests).
A snapshot is just a group of tarballs, one for each supported distribution/architecture/os of Elasticsearch, and a JSON-based manifest file containing metadata about the distributions.
@@ -9,13 +9,13 @@ https://ci.kibana.dev/es-snapshots[A dashboard] is available that shows the curr
==== Process Overview
-1. Elasticsearch snapshots are built for each current tracked branch of Kibana.
+1. Elasticsearch snapshots are built for each current tracked branch of {kib}.
2. Each snapshot is uploaded to a public Google Cloud Storage bucket, `kibana-ci-es-snapshots-daily`.
** At this point, the snapshot is not automatically used in CI or local development. It needs to be tested/verified first.
-3. Each snapshot is tested with the latest commit of the corresponding Kibana branch, using the full CI suite.
+3. Each snapshot is tested with the latest commit of the corresponding {kib} branch, using the full CI suite.
4. After CI
** If the snapshot passes, it is promoted and automatically used in CI and local development.
-** If the snapshot fails, the issue must be investigated and resolved. A new incompatibility may exist between Elasticsearch and Kibana.
+** If the snapshot fails, the issue must be investigated and resolved. A new incompatibility may exist between Elasticsearch and {kib}.
==== Using the latest snapshot
@@ -39,7 +39,7 @@ KBN_ES_SNAPSHOT_USE_UNVERIFIED=true node scripts/functional_tests_server
Currently, there is not a way to run your pull request with the latest unverified snapshot without a code change. You can, however, do it with a small code change.
-1. Edit `Jenkinsfile` in the root of the Kibana repo
+1. Edit `Jenkinsfile` in the root of the {kib} repo
2. Add `env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = 'true'` at the top of the file.
3. Commit the change
@@ -75,13 +75,13 @@ The file structure for this bucket looks like this:
==== How snapshots are built, tested, and promoted
-Each day, a https://kibana-ci.elastic.co/job/elasticsearch+snapshots+trigger/[Jenkins job] runs that triggers Elasticsearch builds for each currently tracked branch/version. This job is automatically updated with the correct branches whenever we release new versions of Kibana.
+Each day, a https://kibana-ci.elastic.co/job/elasticsearch+snapshots+trigger/[Jenkins job] runs that triggers Elasticsearch builds for each currently tracked branch/version. This job is automatically updated with the correct branches whenever we release new versions of {kib}.
===== Build
https://kibana-ci.elastic.co/job/elasticsearch+snapshots+build/[This Jenkins job] builds the Elasticsearch snapshots and uploads them to GCS.
-The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_build_es[in the kibana repo].
+The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_build_es[in the {kib} repo].
1. Checkout Elasticsearch repo for the given branch/version.
2. Run `./gradlew -p distribution/archives assemble --parallel` to create all of the Elasticsearch distributions.
@@ -91,15 +91,15 @@ The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/ma
** e.g. `/archives/`
6. Replace `/manifest-latest.json` in GCS with this newest manifest.
** This allows the `KBN_ES_SNAPSHOT_USE_UNVERIFIED` flag to work.
-7. Trigger the verification job, to run the full Kibana CI test suite with this snapshot.
+7. Trigger the verification job, to run the full {kib} CI test suite with this snapshot.
===== Verification and Promotion
-https://kibana-ci.elastic.co/job/elasticsearch+snapshots+verify/[This Jenkins job] tests the latest Elasticsearch snapshot with the full Kibana CI pipeline, and promotes if it there are no test failures.
+https://kibana-ci.elastic.co/job/elasticsearch+snapshots+verify/[This Jenkins job] tests the latest Elasticsearch snapshot with the full {kib} CI pipeline, and promotes if it there are no test failures.
-The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_verify_es[in the kibana repo].
+The Jenkins job pipeline definition is https://github.com/elastic/kibana/blob/master/.ci/es-snapshots/Jenkinsfile_verify_es[in the {kib} repo].
-1. Checkout Kibana and set up CI environment as normal.
+1. Checkout {kib} and set up CI environment as normal.
2. Set the `ES_SNAPSHOT_MANIFEST` env var to point to the latest snapshot manifest.
3. Run CI (functional tests, integration tests, etc).
4. After CI
diff --git a/docs/developer/advanced/index.asciidoc b/docs/developer/advanced/index.asciidoc
new file mode 100644
index 0000000000000..139940ee42fe2
--- /dev/null
+++ b/docs/developer/advanced/index.asciidoc
@@ -0,0 +1,12 @@
+[[advanced]]
+== Advanced
+
+* <<development-es-snapshots>>
+* <<running-elasticsearch>>
+* <<development-basepath>>
+
+include::development-es-snapshots.asciidoc[]
+
+include::running-elasticsearch.asciidoc[]
+
+include::development-basepath.asciidoc[]
\ No newline at end of file
diff --git a/docs/developer/advanced/running-elasticsearch.asciidoc b/docs/developer/advanced/running-elasticsearch.asciidoc
new file mode 100644
index 0000000000000..b03c231678eee
--- /dev/null
+++ b/docs/developer/advanced/running-elasticsearch.asciidoc
@@ -0,0 +1,118 @@
+[[running-elasticsearch]]
+=== Running Elasticsearch during development
+
+There are many ways to run Elasticsearch while you are developing.
+
+[float]
+==== By snapshot
+
+This will run a snapshot of Elasticsearch that is usually built nightly. Read more about <<development-es-snapshots>>.
+
+[source,bash]
+----
+yarn es snapshot
+----
+
+See all available options, like how to specify a specific license, with the `--help` flag.
+
+[source,bash]
+----
+yarn es snapshot --help
+----
+
+`trial` will give you access to all capabilities.
+
+**Keeping data between snapshots**
+
+If you want to keep the data inside your Elasticsearch between usages of this command, you should use the following command, to keep your data folder outside the downloaded snapshot folder:
+
+[source,bash]
+----
+yarn es snapshot -E path.data=../data
+----
+
+==== By source
+
+If you have the Elasticsearch repo checked out locally and wish to run against that, use `source`. By default, it will reference an Elasticsearch checkout which is a sibling to the {kib} directory named `elasticsearch`. If you wish to use a checkout in another location you can provide that by supplying `--source-path`.
+
+[source,bash]
+----
+yarn es source
+----
+
+==== From an archive
+
+Use this if you already have a distributable. For released versions, one can be obtained on the Elasticsearch downloads page.
+
+[source,bash]
+----
+yarn es archive
+----
+
+Each of these will run Elasticsearch with a basic license. Additional options are available, pass `--help` for more information.
+
+==== From a remote host
+
+You can save some system resources, and the effort of generating sample data, if you have a remote Elasticsearch cluster to connect to. (Elasticians: you do! Check with your team about where to find credentials)
+
+You'll need to create a `kibana.dev.yml` (<<settings>>) and add the following to it:
+
+[source,bash]
+----
+elasticsearch.hosts:
+ - {{ url }}
+elasticsearch.username: {{ username }}
+elasticsearch.password: {{ password }}
+elasticsearch.ssl.verificationMode: none
+----
+
+If many other users will be interacting with your remote cluster, you'll want to add the following to avoid causing conflicts:
+
+[source,bash]
+----
+kibana.index: '.{YourGitHubHandle}-kibana'
+xpack.task_manager.index: '.{YourGitHubHandle}-task-manager-kibana'
+----
+
+===== Running remote clusters
+
+Set up remote clusters for cross cluster search (CCS) and cross cluster replication (CCR).
+
+Start your primary cluster by running:
+
+[source,bash]
+----
+yarn es snapshot -E path.data=../data_prod1
+----
+
+Start your remote cluster by running:
+
+[source,bash]
+----
+yarn es snapshot -E transport.port=9500 -E http.port=9201 -E path.data=../data_prod2
+----
+
+Once both clusters are running, start {kib}. {kib} will connect to the primary cluster.
+
+Set up the remote cluster in {kib} from either Management -> Elasticsearch -> Remote Clusters UI or by running the following script in Console.
+
+[source,bash]
+----
+PUT _cluster/settings
+{
+ "persistent": {
+ "cluster": {
+ "remote": {
+ "cluster_one": {
+ "seeds": [
+ "localhost:9500"
+ ]
+ }
+ }
+ }
+ }
+}
+----
+
+Follow the cross-cluster search instructions for setting up index patterns to search across clusters (<<management-cross-cluster-search>>).
\ No newline at end of file
diff --git a/docs/developer/architecture/add-data-tutorials.asciidoc b/docs/developer/architecture/add-data-tutorials.asciidoc
new file mode 100644
index 0000000000000..e16b1bc039a10
--- /dev/null
+++ b/docs/developer/architecture/add-data-tutorials.asciidoc
@@ -0,0 +1,38 @@
+[[add-data-tutorials]]
+=== Add data tutorials
+
+`Add Data` in the {kib} Home application contains tutorials for setting up data flows in the Elastic stack.
+
+Each tutorial contains three sets of instructions:
+
+* `On Premise.` Set up a data flow when both {kib} and Elasticsearch are running on premise.
+* `On Premise Elastic Cloud.` Set up a data flow when {kib} is running on premise and Elasticsearch is running on Elastic Cloud.
+* `Elastic Cloud.` Set up a data flow when both {kib} and Elasticsearch are running on Elastic Cloud.
+
+[float]
+==== Creating a new tutorial
+1. Create a new directory in the link:https://github.com/elastic/kibana/tree/master/src/plugins/home/server/tutorials[tutorials directory].
+2. In the new directory, create a file called `index.ts` that exports a function.
+The function must return a function object that conforms to the `TutorialSchema` interface link:{kib-repo}tree/{branch}/src/plugins/home/server/services/tutorials/lib/tutorial_schema.ts[tutorial schema].
+3. Register the tutorial in link:{kib-repo}tree/{branch}/src/plugins/home/server/tutorials/register.ts[register.ts] by adding it to the `builtInTutorials`.
+// TODO update path once assets are migrated
+4. Add image assets to the link:{kib-repo}tree/{branch}/src/legacy/core_plugins/kibana/public/home/tutorial_resources[tutorial_resources directory].
+5. Run {kib} locally to preview the tutorial.
+6. Create a PR and go through the review process to get the changes approved.
+
+If you are creating a new plugin and the tutorial is only related to that plugin, you can also place the `TutorialSchema` object into your plugin folder. Add `home` to the `requiredPlugins` list in your `kibana.json` file.
+Then register the tutorial object by calling `home.tutorials.registerTutorial(tutorialObject)` in the `setup` lifecycle of your server plugin.
+
+[float]
+===== Variables
+String values can contain variables that are substituted when rendered. Variables are specified by `{}`.
+For example: `{config.docs.version}` is rendered as `6.2` when running the tutorial in {kib} 6.2.
+
+link:{kib-repo}tree/{branch}/src/legacy/core_plugins/kibana/public/home/np_ready/components/tutorial/replace_template_strings.js#L23[Provided variables]
+
+[float]
+===== Markdown
+String values can contain limited Markdown syntax.
+
+link:{kib-repo}tree/{branch}/src/legacy/core_plugins/kibana/public/home/components/tutorial/content.js#L8[Enabled Markdown grammars]
+
diff --git a/docs/developer/visualize/development-visualize-index.asciidoc b/docs/developer/architecture/development-visualize-index.asciidoc
similarity index 85%
rename from docs/developer/visualize/development-visualize-index.asciidoc
rename to docs/developer/architecture/development-visualize-index.asciidoc
index ac824b4702a3c..551c41833fb72 100644
--- a/docs/developer/visualize/development-visualize-index.asciidoc
+++ b/docs/developer/architecture/development-visualize-index.asciidoc
@@ -1,13 +1,13 @@
[[development-visualize-index]]
-== Developing Visualizations
+=== Developing Visualizations
[IMPORTANT]
==============================================
-These pages document internal APIs and are not guaranteed to be supported across future versions of Kibana.
+These pages document internal APIs and are not guaranteed to be supported across future versions of {kib}.
==============================================
The internal APIs for creating custom visualizations are in a state of heavy churn as
-they are being migrated to the new Kibana platform, and large refactorings have been
+they are being migrated to the new {kib} platform, and large refactorings have been
happening across minor releases in the `7.x` series. In particular, in `7.5` and later
we have made significant changes to the legacy APIs as we work to gradually replace them.
@@ -20,7 +20,7 @@ If you would like to keep up with progress on the visualizations plugin in the m
here are a few resources:
* The <> documentation, where we try to capture any changes to the APIs as they occur across minors.
-* link:https://github.com/elastic/kibana/issues/44121[Meta issue] which is tracking the move of the plugin to the new Kibana platform
+* link:https://github.com/elastic/kibana/issues/44121[Meta issue] which is tracking the move of the plugin to the new {kib} platform
* Our link:https://www.elastic.co/blog/join-our-elastic-stack-workspace-on-slack[Elastic Stack workspace on Slack].
* The {kib-repo}blob/{branch}/src/plugins/visualizations[source code], which will continue to be
the most accurate source of information.
diff --git a/docs/developer/architecture/index.asciidoc b/docs/developer/architecture/index.asciidoc
new file mode 100644
index 0000000000000..d726a8bd3642d
--- /dev/null
+++ b/docs/developer/architecture/index.asciidoc
@@ -0,0 +1,25 @@
+[[kibana-architecture]]
+== Architecture
+
+[IMPORTANT]
+==============================================
+{kib} developer services and APIs are in a state of constant development. We cannot provide backwards compatibility at this time due to the high rate of change.
+==============================================
+
+Our developer services are changing all the time. One of the best ways to discover and learn about them is to read the available
+READMEs from all the plugins inside our {kib-repo}tree/{branch}/src/plugins[open source plugins folder] and our
+{kib-repo}/tree/{branch}/x-pack/plugins[commercial plugins folder].
+
+A few services also automatically generate api documentation which can be browsed inside the {kib-repo}tree/{branch}/docs/development[docs/development section of our repo]
+
+A few notable services are called out below.
+
+* <<add-data-tutorials>>
+* <<development-visualize-index>>
+* <<development-security>>
+
+include::add-data-tutorials.asciidoc[]
+
+include::development-visualize-index.asciidoc[]
+
+include::security/index.asciidoc[]
diff --git a/docs/developer/plugin/development-plugin-feature-registration.asciidoc b/docs/developer/architecture/security/feature-registration.asciidoc
similarity index 96%
rename from docs/developer/plugin/development-plugin-feature-registration.asciidoc
rename to docs/developer/architecture/security/feature-registration.asciidoc
index 203cc201ee626..164f6d1cf9c74 100644
--- a/docs/developer/plugin/development-plugin-feature-registration.asciidoc
+++ b/docs/developer/architecture/security/feature-registration.asciidoc
@@ -1,13 +1,13 @@
[[development-plugin-feature-registration]]
-=== Plugin feature registration
+==== Plugin feature registration
If your plugin will be used with {kib}'s default distribution, then you have the ability to register the features that your plugin provides. Features are typically apps in {kib}; once registered, you can toggle them via Spaces, and secure them via Roles when security is enabled.
-==== UI Capabilities
+===== UI Capabilities
Registering features also gives your plugin access to “UI Capabilities”. These capabilities are boolean flags that you can use to conditionally render your interface, based on the current user's permissions. For example, you can hide or disable a Save button if the current user is not authorized.
-==== Registering a feature
+===== Registering a feature
Feature registration is controlled via the built-in `xpack_main` plugin. To register a feature, call `xpack_main`'s `registerFeature` function from your plugin's `init` function, and provide the appropriate details:
@@ -65,12 +65,12 @@ Registering a feature consists of the following fields. For more information, co
|The ID of the navigation link associated with your feature.
|===
-===== Privilege definition
+====== Privilege definition
The `privileges` section of feature registration allows plugins to implement read/write and read-only modes for their applications.
For a full explanation of fields and options, consult the {kib-repo}blob/{branch}/x-pack/plugins/features/server/feature_registry.ts[feature registry interface].
-==== Using UI Capabilities
+===== Using UI Capabilities
UI Capabilities are available to your public (client) plugin code. These capabilities are read-only, and are used to inform the UI. This object is namespaced by feature id. For example, if your feature id is “foo”, then your UI Capabilities are stored at `uiCapabilities.foo`.
To access capabilities, import them from `ui/capabilities`:
@@ -86,7 +86,7 @@ if (canUserSave) {
-----------
[[example-1-canvas]]
-==== Example 1: Canvas Application
+===== Example 1: Canvas Application
["source","javascript"]
-----------
init(server) {
@@ -118,13 +118,13 @@ init(server) {
}
-----------
-This shows how the Canvas application might register itself as a Kibana feature.
+This shows how the Canvas application might register itself as a {kib} feature.
Note that it specifies different `savedObject` access levels for each privilege:
- Users with read/write access (`all` privilege) need to be able to read/write `canvas-workpad` saved objects, and they need read-only access to `index-pattern` saved objects.
- Users with read-only access (`read` privilege) do not need to have read/write access to any saved objects, but instead get read-only access to `index-pattern` and `canvas-workpad` saved objects.
-Additionally, Canvas registers the `canvas` UI app and `canvas` catalogue entry. This tells Kibana that these entities are available for users with either the `read` or `all` privilege.
+Additionally, Canvas registers the `canvas` UI app and `canvas` catalogue entry. This tells {kib} that these entities are available for users with either the `read` or `all` privilege.
The `all` privilege defines a single “save” UI Capability. To access this in the UI, Canvas could:
@@ -141,7 +141,7 @@ if (canUserSave) {
Because the `read` privilege does not define the `save` capability, users with read-only access will have their `uiCapabilities.canvas.save` flag set to `false`.
[[example-2-dev-tools]]
-==== Example 2: Dev Tools
+===== Example 2: Dev Tools
["source","javascript"]
-----------
@@ -199,7 +199,7 @@ server.route({
-----------
[[example-3-discover]]
-==== Example 3: Discover
+===== Example 3: Discover
Discover takes advantage of subfeature privileges to allow fine-grained access control. In this example,
a single "Create Short URLs" subfeature privilege is defined, which allows users to grant access to this feature without having to grant the `all` privilege to Discover. In other words, you can grant `read` access to Discover, and also grant the ability to create short URLs.
diff --git a/docs/developer/architecture/security/index.asciidoc b/docs/developer/architecture/security/index.asciidoc
new file mode 100644
index 0000000000000..55b2450caf7a7
--- /dev/null
+++ b/docs/developer/architecture/security/index.asciidoc
@@ -0,0 +1,12 @@
+[[development-security]]
+=== Security
+
+{kib} has generally been able to implement security transparently to core and plugin developers, and this largely remains the case. {kib} relies on two methods that the elasticsearch `Cluster` provides: `callWithRequest` and `callWithInternalUser`.
+
+`callWithRequest` executes requests against Elasticsearch using the authentication credentials of the {kib} end-user. So, if you log into {kib} as the user `foo`, then when `callWithRequest` is used, {kib} executes the request against Elasticsearch as the user `foo`. Historically, `callWithRequest` has been used extensively to perform actions that are initiated at the request of {kib} end-users.
+
+`callWithInternalUser` executes requests against Elasticsearch using the internal {kib} server user, and has historically been used for performing actions that aren't initiated by {kib} end users; for example, creating the initial `.kibana` index or performing health checks against Elasticsearch.
+
+However, with the changes that role-based access control (RBAC) introduces, this is no longer cut and dried. {kib} now requires that all access to the `.kibana` index go through the `SavedObjectsClient`. This used to be a best practice, as the `SavedObjectsClient` was responsible for translating the documents stored in Elasticsearch to and from Saved Objects, but RBAC is now taking advantage of this abstraction to implement access control and determine when to use `callWithRequest` versus `callWithInternalUser`.
+
+include::rbac.asciidoc[]
diff --git a/docs/developer/security/rbac.asciidoc b/docs/developer/architecture/security/rbac.asciidoc
similarity index 96%
rename from docs/developer/security/rbac.asciidoc
rename to docs/developer/architecture/security/rbac.asciidoc
index 02b8233a9a3df..ae1979e856e23 100644
--- a/docs/developer/security/rbac.asciidoc
+++ b/docs/developer/architecture/security/rbac.asciidoc
@@ -1,5 +1,5 @@
[[development-security-rbac]]
-=== Role-based access control
+==== Role-based access control
Role-based access control (RBAC) in {kib} relies upon the
{ref}/security-privileges.html#application-privileges[application privileges]
@@ -11,7 +11,7 @@ consumers when using `request.getSavedObjectsClient()` or
`savedObjects.getScopedSavedObjectsClient()`.
[[development-rbac-privileges]]
-==== {kib} Privileges
+===== {kib} Privileges
When {kib} first starts up, it executes the following `POST` request against {es}. This synchronizes the definition of the privileges with various `actions` which are later used to authorize a user:
@@ -19,7 +19,7 @@ When {kib} first starts up, it executes the following `POST` request against {es
----------------------------------
POST /_security/privilege
Content-Type: application/json
-Authorization: Basic kibana changeme
+Authorization: Basic kibana changeme
{
"kibana-.kibana":{
@@ -56,7 +56,7 @@ The application is created by concatenating the prefix of `kibana-` with the val
==============================================
[[development-rbac-assigning-privileges]]
-==== Assigning {kib} Privileges
+===== Assigning {kib} Privileges
{kib} privileges are assigned to specific roles using the `applications` element. For example, the following role assigns the <> privilege at `*` `resources` (which will in the future be used to secure spaces) to the default {kib} `application`:
@@ -81,7 +81,7 @@ Roles that grant <> should be managed using the <>
+* <>
+* <>
+
+include::stability.asciidoc[]
+
+include::security.asciidoc[]
diff --git a/docs/developer/best-practices/security.asciidoc b/docs/developer/best-practices/security.asciidoc
new file mode 100644
index 0000000000000..26fcc73ce2b90
--- /dev/null
+++ b/docs/developer/best-practices/security.asciidoc
@@ -0,0 +1,55 @@
+[[security-best-practices]]
+=== Security best practices
+
+* XSS
+** Check for usages of `dangerouslySetInnerHtml`, `Element.innerHTML`,
+`Element.outerHTML`
+** Ensure all user input is properly escaped.
+** Ensure any input in `$.html`, `$.append`, `$.appendTo`,
+`$.prepend`, `$.prependTo` is escaped. Instead use `$.text`, or
+don’t use jQuery at all.
+* CSRF
+** Ensure all APIs are running inside the {kib} HTTP service.
+* RCE
+** Ensure no usages of `eval`
+** Ensure no usages of dynamic requires
+** Check for template injection
+** Check for usages of templating libraries, including `_.template`, and
+ensure that user provided input isn’t influencing the template and is
+only used as data for rendering the template.
+** Check for possible prototype pollution.
+* Prototype Pollution
+** Check for instances of `anObject[a][b] = c` where a, b, and c are
+user defined. This includes code paths where the following logical code
+steps could be performed in separate files by completely different
+operations, or recursively using dynamic operations.
+** Validate any user input, including API
+url-parameters/query-parameters/payloads, preferably against a schema
+which only allows specific keys/values. At a very minimum, black-list
+`__proto__` and `prototype.constructor` for use within keys
+** When calling APIs which spawn new processes or potentially perform
+code generation from strings, defensively protect against Prototype
+Pollution by checking `Object.hasOwnProperty` if the arguments to the
+APIs originate from an Object. An example is the Code app’s
+https://github.com/elastic/kibana/blob/b49192626a8528af5d888545fb14cd1ce66a72e7/x-pack/legacy/plugins/code/server/lsp/workspace_command.ts#L40-L44[spawnProcess].
+*** Common Node.js offenders: `child_process.spawn`,
+`child_process.exec`, `eval`, `Function('some string')`,
+`vm.runIn*Context(x)`
+*** Common Client-side offenders: `eval`, `Function('some string')`,
+`setTimeout('some string', num)`, `setInterval('some string', num)`
+* Check for accidental reveal of sensitive information
+** The biggest culprit is errors which contain stack traces or other
+sensitive information which end up in the HTTP Response
+* Check for mishandled API requests
+** Ensure no sensitive cookies are forwarded to external resources.
+** Ensure that all user controllable variables that are used in
+constructing a URL are escaped properly. This is relevant when using
+`transport.request` with the Elasticsearch client as no automatic
+escaping is performed.
+* Reverse tabnabbing -
+https://github.com/OWASP/CheatSheetSeries/blob/master/cheatsheets/HTML5_Security_Cheat_Sheet.md#tabnabbing
+** When there are user controllable links or hard-coded links to
+third-party domains that specify target="_blank" or target="_window", the `a` tag should have the rel="noreferrer noopener" attribute specified.
+Allowing users to input markdown is a common culprit, a custom link renderer should be used
+* SSRF - https://www.owasp.org/index.php/Server_Side_Request_Forgery
+All network requests made from the {kib} server should use an explicit configuration or white-list specified in the kibana.yml
\ No newline at end of file
diff --git a/docs/developer/best-practices/stability.asciidoc b/docs/developer/best-practices/stability.asciidoc
new file mode 100644
index 0000000000000..68237a034be52
--- /dev/null
+++ b/docs/developer/best-practices/stability.asciidoc
@@ -0,0 +1,66 @@
+[[stability]]
+=== Stability
+
+Ensure your feature will work under all possible {kib} scenarios.
+
+[float]
+==== Environmental configuration scenarios
+
+* Cloud
+** Does the feature work on *cloud environment*?
+** Does it create a setting that needs to be exposed, or configured
+differently than the default, on Cloud? (whitelisting of certain
+settings/users? Ref:
+https://www.elastic.co/guide/en/cloud/current/ec-add-user-settings.html
+,
+https://www.elastic.co/guide/en/cloud/current/ec-manage-kibana-settings.html)
+** Is there a significant performance impact that may affect Cloud
+{kib} instances?
+** Does it need to be aware of running in a container? (for example
+monitoring)
+* Multiple {kib} instances
+** Pointing to the same index
+** Pointing to different indexes
+*** Should make sure that the {kib} index is not hardcoded anywhere.
+*** Should not be storing a bunch of stuff in {kib} memory.
+*** Should emulate a high availability deployment.
+*** Anticipating different timing related issues due to shared resource
+access.
+*** We need to make sure security is set up in a specific way for
+non-standard {kib} indices. (create their own custom roles)
+* {kib} running behind a reverse proxy or load balancer, without sticky
+sessions. (we have had many discuss/SDH tickets around this)
+* If a proxy/loadbalancer is running between ES and {kib}
+
+[float]
+==== Kibana.yml settings
+
+* Using a custom {kib} index alias
+* When optional dependencies are disabled
+** Ensure all your required dependencies are listed in kibana.json
+dependency list!
+
+[float]
+==== Test coverage
+
+* Does the feature have sufficient unit test coverage? (does it handle
+storeinSessions?)
+* Does the feature have sufficient Functional UI test coverage?
+* Does the feature have sufficient Rest API coverage test coverage?
+* Does the feature have sufficient Integration test coverage?
+
+[float]
+==== Browser coverage
+
+Refer to the list of browsers and OS {kib} supports
+https://www.elastic.co/support/matrix
+
+Does the feature work efficiently on the list of supported browsers?
+
+[float]
+==== Upgrade and migration scenarios
+
+* Does the feature affect old indices or saved objects?
+* Has the feature been tested with {kib} aliases?
+* Are the read/write privileges of the indices correct before and
+after the upgrade?
diff --git a/docs/developer/contributing/development-accessibility-tests.asciidoc b/docs/developer/contributing/development-accessibility-tests.asciidoc
new file mode 100644
index 0000000000000..a3ffefb94cd2a
--- /dev/null
+++ b/docs/developer/contributing/development-accessibility-tests.asciidoc
@@ -0,0 +1,23 @@
+[[development-accessibility-tests]]
+==== Automated Accessibility Testing
+
+To run the tests locally:
+
+[arabic]
+. In one terminal window run
+`node scripts/functional_tests_server --config test/accessibility/config.ts`
+. In another terminal window run
+`node scripts/functional_test_runner.js --config test/accessibility/config.ts`
+
+To run the x-pack tests, swap the config file out for
+`x-pack/test/accessibility/config.ts`.
+
+After the server is up, you can go to this instance of {kib} at
+`localhost:5620`.
+
+The testing is done using https://github.com/dequelabs/axe-core[axe].
+The same thing that runs in CI, can be run locally using their browser
+plugins:
+
+* https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US[Chrome]
+* https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/[Firefox]
\ No newline at end of file
diff --git a/docs/developer/contributing/development-documentation.asciidoc b/docs/developer/contributing/development-documentation.asciidoc
new file mode 100644
index 0000000000000..d9fae42eef87e
--- /dev/null
+++ b/docs/developer/contributing/development-documentation.asciidoc
@@ -0,0 +1,34 @@
+[[development-documentation]]
+=== Documentation during development
+
+Docs should be written during development and accompany PRs when relevant. There are multiple types of documentation, and different places to add each.
+
+[float]
+==== Developer services documentation
+
+Documentation about specific services a plugin offers should be encapsulated in:
+
+* README.asciidoc at the base of the plugin folder.
+* Typescript comments for all public services.
+
+[float]
+==== End user documentation
+
+Documentation about user facing features should be written in http://asciidoc.org/[asciidoc] at
+{kib-repo}/tree/master/docs[https://github.com/elastic/kibana/tree/master/docs]
+
+To build the docs, you must clone the https://github.com/elastic/docs[elastic/docs]
+repo as a sibling of your {kib} repo. Follow the instructions in that project's
+README for getting the docs tooling set up.
+
+**To build the docs:**
+
+```bash
+node scripts/docs.js --open
+```
+
+[float]
+==== General developer documentation and guidelines
+
+General developer guidelines and documentation, like this right here, should be written in http://asciidoc.org/[asciidoc]
+at {kib-repo}/tree/master/docs/developer[https://github.com/elastic/kibana/tree/master/docs/developer]
diff --git a/docs/developer/core/development-functional-tests.asciidoc b/docs/developer/contributing/development-functional-tests.asciidoc
similarity index 90%
rename from docs/developer/core/development-functional-tests.asciidoc
rename to docs/developer/contributing/development-functional-tests.asciidoc
index 2b091d9abb9fc..442fc1ac755d3 100644
--- a/docs/developer/core/development-functional-tests.asciidoc
+++ b/docs/developer/contributing/development-functional-tests.asciidoc
@@ -1,38 +1,39 @@
[[development-functional-tests]]
=== Functional Testing
-We use functional tests to make sure the Kibana UI works as expected. It replaces hours of manual testing by automating user interaction. To have better control over our functional test environment, and to make it more accessible to plugin authors, Kibana uses a tool called the `FunctionalTestRunner`.
+We use functional tests to make sure the {kib} UI works as expected. It replaces hours of manual testing by automating user interaction. To have better control over our functional test environment, and to make it more accessible to plugin authors, {kib} uses a tool called the `FunctionalTestRunner`.
[float]
==== Running functional tests
-The `FunctionalTestRunner` is very bare bones and gets most of its functionality from its config file, located at {blob}test/functional/config.js[test/functional/config.js]. If you’re writing a plugin you will have your own config file. See <> for more info.
+The `FunctionalTestRunner` is very bare bones and gets most of its functionality from its config file, located at {blob}test/functional/config.js[test/functional/config.js]. If you’re writing a plugin outside the {kib} repo, you will have your own config file.
+ See <> for more info.
There are three ways to run the tests depending on your goals:
1. Easiest option:
-** Description: Starts up Kibana & Elasticsearch servers, followed by running tests. This is much slower when running the tests multiple times because slow startup time for the servers. Recommended for single-runs.
+** Description: Starts up {kib} & Elasticsearch servers, followed by running tests. This is much slower when running the tests multiple times because slow startup time for the servers. Recommended for single-runs.
** `node scripts/functional_tests`
-*** does everything in a single command, including running Elasticsearch and Kibana locally
+*** does everything in a single command, including running Elasticsearch and {kib} locally
*** tears down everything after the tests run
*** exit code reports success/failure of the tests
2. Best for development:
-** Description: Two commands, run in separate terminals, separate the components that are long-running and slow from those that are ephemeral and fast. Tests can be re-run much faster, and this still runs Elasticsearch & Kibana locally.
+** Description: Two commands, run in separate terminals, separate the components that are long-running and slow from those that are ephemeral and fast. Tests can be re-run much faster, and this still runs Elasticsearch & {kib} locally.
** `node scripts/functional_tests_server`
-*** starts Elasticsearch and Kibana servers
+*** starts Elasticsearch and {kib} servers
*** slow to start
*** can be reused for multiple executions of the tests, thereby saving some time when re-running tests
-*** automatically restarts the Kibana server when relevant changes are detected
+*** automatically restarts the {kib} server when relevant changes are detected
** `node scripts/functional_test_runner`
-*** runs the tests against Kibana & Elasticsearch servers that were started by `node scripts/functional_tests_server`
+*** runs the tests against {kib} & Elasticsearch servers that were started by `node scripts/functional_tests_server`
*** exit code reports success or failure of the tests
3. Custom option:
-** Description: Runs tests against instances of Elasticsearch & Kibana started some other way (like Elastic Cloud, or an instance you are managing in some other way).
+** Description: Runs tests against instances of Elasticsearch & {kib} started some other way (like Elastic Cloud, or an instance you are managing in some other way).
** just executes the functional tests
-** url, credentials, etc. for Elasticsearch and Kibana are specified via environment variables
-** Here's an example that runs against an Elastic Cloud instance. Note that you must run the same branch of tests as the version of Kibana you're testing.
+** url, credentials, etc. for Elasticsearch and {kib} are specified via environment variables
+** Here's an example that runs against an Elastic Cloud instance. Note that you must run the same branch of tests as the version of {kib} you're testing.
+
["source","shell"]
----------
@@ -95,10 +96,10 @@ node scripts/functional_test_runner --exclude-tag skipCloud
When run without any arguments the `FunctionalTestRunner` automatically loads the configuration in the standard location, but you can override that behavior with the `--config` flag. List configs with multiple --config arguments.
-* `--config test/functional/config.js` starts Elasticsearch and Kibana servers with the WebDriver tests configured to run in Chrome.
-* `--config test/functional/config.firefox.js` starts Elasticsearch and Kibana servers with the WebDriver tests configured to run in Firefox.
-* `--config test/api_integration/config.js` starts Elasticsearch and Kibana servers with the api integration tests configuration.
-* `--config test/accessibility/config.ts` starts Elasticsearch and Kibana servers with the WebDriver tests configured to run an accessibility audit using https://www.deque.com/axe/[axe].
+* `--config test/functional/config.js` starts Elasticsearch and {kib} servers with the WebDriver tests configured to run in Chrome.
+* `--config test/functional/config.firefox.js` starts Elasticsearch and {kib} servers with the WebDriver tests configured to run in Firefox.
+* `--config test/api_integration/config.js` starts Elasticsearch and {kib} servers with the api integration tests configuration.
+* `--config test/accessibility/config.ts` starts Elasticsearch and {kib} servers with the WebDriver tests configured to run an accessibility audit using https://www.deque.com/axe/[axe].
There are also command line flags for `--bail` and `--grep`, which behave just like their mocha counterparts. For instance, use `--grep=foo` to run only tests that match a regular expression.
@@ -117,7 +118,7 @@ The tests are written in https://mochajs.org[mocha] using https://github.com/ela
We use https://www.w3.org/TR/webdriver1/[WebDriver Protocol] to run tests in both Chrome and Firefox with the help of https://sites.google.com/a/chromium.org/chromedriver/[chromedriver] and https://firefox-source-docs.mozilla.org/testing/geckodriver/[geckodriver]. When the `FunctionalTestRunner` launches, remote service creates a new webdriver session, which starts the driver and a stripped-down browser instance. We use `browser` service and `webElementWrapper` class to wrap up https://seleniumhq.github.io/selenium/docs/api/javascript/module/selenium-webdriver/[Webdriver API].
-The `FunctionalTestRunner` automatically transpiles functional tests using babel, so that tests can use the same ECMAScript features that Kibana source code uses. See {blob}style_guides/js_style_guide.md[style_guides/js_style_guide.md].
+The `FunctionalTestRunner` automatically transpiles functional tests using babel, so that tests can use the same ECMAScript features that {kib} source code uses. See {blob}style_guides/js_style_guide.md[style_guides/js_style_guide.md].
[float]
===== Definitions
@@ -304,9 +305,9 @@ The `FunctionalTestRunner` comes with three built-in services:
* Phases include: `beforeLoadTests`, `beforeTests`, `beforeEachTest`, `cleanup`
[float]
-===== Kibana Services
+===== {kib} Services
-The Kibana functional tests define the vast majority of the actual functionality used by tests.
+The {kib} functional tests define the vast majority of the actual functionality used by tests.
**browser**:::
* Source: {blob}test/functional/services/browser.ts[test/functional/services/browser.ts]
@@ -356,7 +357,7 @@ await testSubjects.click(‘containerButton’);
**kibanaServer:**:::
* Source: {blob}test/common/services/kibana_server/kibana_server.js[test/common/services/kibana_server/kibana_server.js]
-* Helpers for interacting with Kibana's server
+* Helpers for interacting with {kib}'s server
* Commonly used methods:
** `kibanaServer.uiSettings.update()`
** `kibanaServer.version.get()`
@@ -501,3 +502,13 @@ const log = getService(‘log’);
// log.debug only writes when using the `--debug` or `--verbose` flag.
log.debug(‘done clicking menu’);
-----------
+
+[float]
+==== MacOS testing performance tip
+
+macOS users on a machine with a discrete graphics card may see significant speedups (up to 2x) when running tests by changing your terminal emulator's GPU settings. In iTerm2:
+* Open Preferences (Command + ,)
+* In the General tab, under the "Magic" section, ensure "GPU rendering" is checked
+* Open "Advanced GPU Settings..."
+* Uncheck the "Prefer integrated to discrete GPU" option
+* Restart iTerm
\ No newline at end of file
diff --git a/docs/developer/contributing/development-github.asciidoc b/docs/developer/contributing/development-github.asciidoc
new file mode 100644
index 0000000000000..027b4e73aa9de
--- /dev/null
+++ b/docs/developer/contributing/development-github.asciidoc
@@ -0,0 +1,112 @@
+[[development-github]]
+=== How we use git and github
+
+[float]
+==== Forking
+
+We follow the https://help.github.com/articles/fork-a-repo/[GitHub
+forking model] for collaborating on {kib} code. This model assumes that
+you have a remote called `upstream` which points to the official {kib}
+repo, which we'll refer to in later code snippets.
+
+[float]
+==== Branching
+
+* All work on the next major release goes into master.
+* Past major release branches are named `{majorVersion}.x`. They contain
+work that will go into the next minor release. For example, if the next
+minor release is `5.2.0`, work for it should go into the `5.x` branch.
+* Past minor release branches are named `{majorVersion}.{minorVersion}`.
+They contain work that will go into the next patch release. For example,
+if the next patch release is `5.3.1`, work for it should go into the
+`5.3` branch.
+* All work is done on feature branches and merged into one of these
+branches.
+* Where appropriate, we'll backport changes into older release branches.
+
+[float]
+==== Commits and Merging
+
+* Feel free to make as many commits as you want, while working on a
+branch.
+* When submitting a PR for review, please perform an interactive rebase
+to present a logical history that's easy for the reviewers to follow.
+* Please use your commit messages to include helpful information on your
+changes, e.g. changes to APIs, UX changes, bugs fixed, and an
+explanation of _why_ you made the changes that you did.
+* Resolve merge conflicts by rebasing the target branch over your
+feature branch, and force-pushing (see below for instructions).
+* When merging, we'll squash your commits into a single commit.
+
+[float]
+===== Rebasing and fixing merge conflicts
+
+Rebasing can be tricky, and fixing merge conflicts can be even trickier
+because it involves force pushing. This is all compounded by the fact
+that attempting to push a rebased branch remotely will be rejected by
+git, and you'll be prompted to do a `pull`, which is not at all what you
+should do (this will really mess up your branch's history).
+
+Here's how you should rebase master onto your branch, and how to fix
+merge conflicts when they arise.
+
+First, make sure master is up-to-date.
+
+["source","shell"]
+-----------
+git checkout master
+git fetch upstream
+git rebase upstream/master
+-----------
+
+Then, check out your branch and rebase master on top of it, which will
+apply all of the new commits on master to your branch, and then apply
+all of your branch's new commits after that.
+
+["source","shell"]
+-----------
+git checkout name-of-your-branch
+git rebase master
+-----------
+
+You want to make sure there are no merge conflicts. If there are merge
+conflicts, git will pause the rebase and allow you to fix the conflicts
+before continuing.
+
+You can use `git status` to see which files contain conflicts. They'll
+be the ones that aren't staged for commit. Open those files, and look
+for where git has marked the conflicts. Resolve the conflicts so that
+the changes you want to make to the code have been incorporated in a way
+that doesn't destroy work that's been done in master. Refer to master's
+commit history on GitHub if you need to gain a better understanding of how code is conflicting and how best to resolve it.
+
+Once you've resolved all of the merge conflicts, use `git add -A` to stage them to be committed, and then use
+ `git rebase --continue` to tell git to continue the rebase.
+
+When the rebase has completed, you will need to force push your branch because the history is now completely different than what's on the remote. This is potentially dangerous because it will completely overwrite what you have on the remote, so you need to be sure that you haven't lost any work when resolving merge conflicts. (If there weren't any merge conflicts, then you can force push without having to worry about this.)
+
+["source","shell"]
+-----------
+git push origin name-of-your-branch --force
+-----------
+
+This will overwrite the remote branch with what you have locally. You're done!
+
+**Note that you should not run git pull**, for example in response to a push rejection like this:
+
+["source","shell"]
+-----------
+! [rejected] name-of-your-branch -> name-of-your-branch (non-fast-forward)
+error: failed to push some refs to 'https://github.com/YourGitHubHandle/kibana.git'
+hint: Updates were rejected because the tip of your current branch is behind
+hint: its remote counterpart. Integrate the remote changes (e.g.
+hint: 'git pull ...') before pushing again.
+hint: See the 'Note about fast-forwards' in 'git push --help' for details.
+-----------
+
+Assuming you've successfully rebased and you're happy with the code, you should force push instead.
+
+[float]
+==== Creating a pull request
+
+See <> for the next steps on getting your code changes merged into {kib}.
\ No newline at end of file
diff --git a/docs/developer/contributing/development-pull-request.asciidoc b/docs/developer/contributing/development-pull-request.asciidoc
new file mode 100644
index 0000000000000..5d3c30fec7383
--- /dev/null
+++ b/docs/developer/contributing/development-pull-request.asciidoc
@@ -0,0 +1,32 @@
+[[development-pull-request]]
+=== Submitting a pull request
+
+[float]
+==== What Goes Into a Pull Request
+
+* Please include an explanation of your changes in your PR description.
+* Links to relevant issues, external resources, or related PRs are very important and useful.
+* Please update any tests that pertain to your code, and add new tests where appropriate.
+* Update or add docs when appropriate. Read more about <>.
+
+[float]
+==== Submitting a Pull Request
+
+ 1. Push your local changes to your forked copy of the repository and submit a pull request.
+ 2. Describe what your changes do and mention the number of the issue where discussion has taken place, e.g., “Closes #123”.
+ 3. Assign the `review` and `💝community` label (assuming you are not a member of the Elastic organization). This signals to the team that someone needs to give this attention.
+ 4. Do *not* assign a version label. Someone from Elastic staff will assign a version label, if necessary, when your Pull Request is ready to be merged.
+ 5. If you would like someone specific to review your pull request, assign them. Otherwise an Elastic staff member will assign the appropriate person.
+
+Always submit your pull against master unless the bug is only present in an older version. If the bug affects both master and another branch say so in your pull.
+
+Then sit back and wait. There will probably be discussion about the Pull Request and, if any changes are needed, we'll work with you to get your Pull Request merged into {kib}.
+
+[float]
+==== What to expect during the pull request review process
+
+Most PRs go through several iterations of feedback and updates. Depending on the scope and complexity of the PR, the process can take weeks. Please
+be patient and understand we hold our code base to a high standard.
+
+Check out our <> for our general philosophy for pull request reviews.
+
diff --git a/docs/developer/contributing/development-tests.asciidoc b/docs/developer/contributing/development-tests.asciidoc
new file mode 100644
index 0000000000000..b470ea61669b2
--- /dev/null
+++ b/docs/developer/contributing/development-tests.asciidoc
@@ -0,0 +1,96 @@
+[[development-tests]]
+=== Testing
+
+To ensure that your changes will not break other functionality, please run the test suite and build (<>) before submitting your Pull Request.
+
+[float]
+==== Running specific {kib} tests
+
+The following table outlines possible test file locations and how to
+invoke them:
+
+[width="100%",cols="7%,59%,34%",options="header",]
+|===
+|Test runner |Test location |Runner command (working directory is {kib}
+root)
+|Jest |`src/**/*.test.js` `src/**/*.test.ts`
+|`yarn test:jest -t regexp [test path]`
+
+|Jest (integration) |`**/integration_tests/**/*.test.js`
+|`yarn test:jest_integration -t regexp [test path]`
+
+|Mocha
+|`src/**/__tests__/**/*.js` `!src/**/public/__tests__/*.js` `packages/kbn-datemath/test/**/*.js` `packages/kbn-dev-utils/src/**/__tests__/**/*.js` `tasks/**/__tests__/**/*.js`
+|`node scripts/mocha --grep=regexp [test path]`
+
+|Functional
+|`test/*integration/**/config.js` `test/*functional/**/config.js` `test/accessibility/config.js`
+|`yarn test:ftr:server --config test/[directory]/config.js` `yarn test:ftr:runner --config test/[directory]/config.js --grep=regexp`
+
+|Karma |`src/**/public/__tests__/*.js` |`yarn test:karma:debug`
+|===
+
+For X-Pack tests located in `x-pack/` see
+link:{kib-repo}tree/{branch}/x-pack/README.md#testing[X-Pack Testing]
+
+Test runner arguments:
+* Where applicable, the optional arguments `-t=regexp` or `--grep=regexp`
+will only run tests or test suites whose descriptions match the regular expression.
+* `[test path]` is the relative path to the test file.
+
+Examples:
+* Run the entire elasticsearch_service test suite:
+`yarn test:jest src/core/server/elasticsearch/elasticsearch_service.test.ts`
+* Run the jest test case whose description matches
+`stops both admin and data clients`:
+`yarn test:jest -t 'stops both admin and data clients' src/core/server/elasticsearch/elasticsearch_service.test.ts`
+* Run the api integration test case whose description matches a given string:
+`yarn test:ftr:server --config test/api_integration/config.js` followed by `yarn test:ftr:runner --config test/api_integration/config.js --grep='my test string'`
+
+[float]
+==== Cross-browser compatibility
+
+**Testing IE on OS X**
+
+* http://www.vmware.com/products/fusion/fusion-evaluation.html[Download
+VMWare Fusion].
+* https://developer.microsoft.com/en-us/microsoft-edge/tools/vms/#downloads[Download
+IE virtual machines] for VMWare.
+* Open VMWare and go to Window > Virtual Machine Library. Unzip the
+virtual machine and drag the .vmx file into your Virtual Machine
+Library.
+* Right-click on the virtual machine you just added to your library and
+select "`Snapshots…`", and then click the "`Take`" button in the modal
+that opens. You can roll back to this snapshot when the VM expires in 90
+days.
+* In System Preferences > Sharing, change your computer name to be
+something simple, e.g. "`computer`".
+* Run {kib} with `yarn start --host=computer.local` (substituting
+your computer name).
+* Now you can run your VM, open the browser, and navigate to
+`http://computer.local:5601` to test {kib}.
+* Alternatively you can use browserstack
+
+[float]
+==== Running browser automation tests
+
+Check out <> to learn more about how you can run
+and develop functional tests for {kib} core and plugins.
+
+You can also look into the {kib-repo}tree/{branch}/scripts/README.md[Scripts README.md]
+to learn more about using the node scripts we provide for building
+{kib}, running integration tests, and starting up {kib} and
+Elasticsearch while you develop.
+
+[float]
+==== More testing information:
+
+* <>
+* <>
+* <>
+
+include::development-functional-tests.asciidoc[]
+
+include::development-unit-tests.asciidoc[]
+
+include::development-accessibility-tests.asciidoc[]
\ No newline at end of file
diff --git a/docs/developer/core/development-unit-tests.asciidoc b/docs/developer/contributing/development-unit-tests.asciidoc
similarity index 52%
rename from docs/developer/core/development-unit-tests.asciidoc
rename to docs/developer/contributing/development-unit-tests.asciidoc
index 04cce0dfec901..0009533c9a7c4 100644
--- a/docs/developer/core/development-unit-tests.asciidoc
+++ b/docs/developer/contributing/development-unit-tests.asciidoc
@@ -1,15 +1,11 @@
[[development-unit-tests]]
-=== Unit Testing
+==== Unit testing frameworks
-We use unit tests to make sure that individual software units of {kib} perform as they were designed to.
+{kib} is migrating unit testing from `Mocha` to `Jest`. Legacy unit tests
+still exist in Mocha but all new unit tests should be written in Jest.
[float]
-=== Current Frameworks
-
-{kib} is migrating unit testing from `Mocha` to `Jest`. Legacy unit tests still exist in `Mocha` but all new unit tests should be written in `Jest`.
-
-[float]
-==== Mocha (legacy)
+===== Mocha (legacy)
Mocha tests are contained in `__tests__` directories.
@@ -32,7 +28,7 @@ yarn test:jest
-----------
[float]
-===== Writing Jest Unit Tests
+====== Writing Jest Unit Tests
In order to write those tests there are two main things you need to be aware of.
The first one is the different between `jest.mock` and `jest.doMock`
@@ -42,7 +38,7 @@ specially for the tests implemented on Typescript in order to benefit from the
auto-inference types feature.
[float]
-===== Jest.mock vs Jest.doMock
+====== Jest.mock vs Jest.doMock
Both methods are essentially the same on their roots however the `jest.mock`
calls will get hoisted to the top of the file and can only reference variables
@@ -52,7 +48,7 @@ variables are instantiated at the time we need them which lead us to the next
section where we'll talk about our jest mock files pattern.
[float]
-===== Jest Mock Files Pattern
+====== Jest Mock Files Pattern
Specially on typescript it is pretty common to have in unit tests
`jest.doMock` calls which reference for example imported types. Any error
@@ -79,5 +75,71 @@ like: `import * as Mocks from './mymodule.test.mocks'`,
`import { mockX } from './mymodule.test.mocks'`
or just `import './mymodule.test.mocks'` if there isn't anything
exported to be used.
-
+[float]
+[[debugging-unit-tests]]
+===== Debugging Unit Tests
+
+The standard `yarn test` task runs several sub tasks and can take
+several minutes to complete, making debugging failures pretty painful.
+In order to ease the pain specialized tasks provide alternate methods
+for running the tests.
+
+You could also add the `--debug` option so that `node` is run using
+the `--debug-brk` flag. You’ll need to connect a remote debugger such
+as https://github.com/node-inspector/node-inspector[`node-inspector`]
+to proceed in this mode.
+
+[source,bash]
+----
+node scripts/mocha --debug
+----
+
+With `yarn test:karma`, you can run only the browser tests. Coverage
+reports are available for browser tests by running
+`yarn test:coverage`. You can find the results under the `coverage/`
+directory that will be created upon completion.
+
+[source,bash]
+----
+yarn test:karma
+----
+
+Using `yarn test:karma:debug` initializes an environment for debugging
+the browser tests. Includes a dedicated instance of the {kib} server
+for building the test bundle, and a karma server. When running this task
+the build is optimized for the first time and then a karma-owned
+instance of the browser is opened. Click the "`debug`" button to open a
+new tab that executes the unit tests.
+
+[source,bash]
+----
+yarn test:karma:debug
+----
+
+In the screenshot below, you’ll notice the URL is
+`localhost:9876/debug.html`. You can append a `grep` query parameter
+to this URL and set it to a string value which will be used to exclude
+tests which don’t match. For example, if you changed the URL to
+`localhost:9876/debug.html?grep=my test` and then refreshed the
+browser, you’d only see tests run which contain "`my test`" in the test
+description.
+
+image:http://i.imgur.com/DwHxgfq.png[Browser test debugging]
+
+[float]
+===== Unit Testing Plugins
+
+This should work out of the box if you’re using the
+https://github.com/elastic/kibana/tree/master/packages/kbn-plugin-generator[Kibana
+plugin generator]. If you’re not using the generator, well, you’re on
+your own. We suggest you look at how the generator works.
+
+To run the tests for just your particular plugin run the following
+command from your plugin:
+
+[source,bash]
+----
+yarn test:mocha
+yarn test:karma:debug # remove the debug flag to run them once and close
+----
\ No newline at end of file
diff --git a/docs/developer/contributing/index.asciidoc b/docs/developer/contributing/index.asciidoc
new file mode 100644
index 0000000000000..4f987f31cf1f6
--- /dev/null
+++ b/docs/developer/contributing/index.asciidoc
@@ -0,0 +1,89 @@
+[[contributing]]
+== Contributing
+
+Whether you want to fix a bug, implement a feature, or add some other improvements or apis, the following sections will
+guide you on the process.
+
+Read <> to get your environment up and running, then read <>.
+
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+
+[discrete]
+[[signing-contributor-agreement]]
+=== Signing the contributor license agreement
+
+Please make sure you have signed the http://www.elastic.co/contributor-agreement/[Contributor License Agreement]. We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once.
+
+[float]
+[[kibana-localization]]
+=== Localization
+
+Read <> for details on our localization practices.
+
+Note that we cannot support accepting contributions to the translations from any source other than the translators we have engaged to do the work.
+We have yet to develop a proper process for accepting contributed translations. We certainly appreciate that people care enough about the localization effort to want to help improve the quality. We aim to build out a more comprehensive localization process for the future and will notify you once contributions can be supported, but for the time being, we are not able to incorporate suggestions.
+
+[float]
+[[kibana-release-notes-process]]
+=== Release Notes Process
+
+Part of this process only applies to maintainers, since it requires
+access to GitHub labels.
+
+{kib} publishes https://www.elastic.co/guide/en/kibana/current/release-notes.html[Release Notes] for major and minor releases.
+The Release Notes summarize what the PRs accomplish in language that is meaningful to users.
+To generate the Release Notes, the team runs a script against this repo to collect the merged PRs against the release.
+
+[float]
+==== Create the Release Notes text
+
+The text that appears in the Release Notes is pulled directly from your PR title, or a single paragraph of text that you specify in the PR description.
+
+To use a single paragraph of text, enter `Release note:` or a `## Release note` header in the PR description, followed by your text. For example, refer to this https://github.com/elastic/kibana/pull/65796[PR] that uses the `## Release note` header.
+
+When you create the Release Notes text, use the following best practices:
+
+* Use present tense.
+* Use sentence case.
+* When you create a feature PR, start with `Adds`.
+* When you create an enhancement PR, start with `Improves`.
+* When you create a bug fix PR, start with `Fixes`.
+* When you create a deprecation PR, start with `Deprecates`.
+
+[float]
+==== Add your labels
+
+[arabic]
+. Label the PR with the targeted version (ex: `v7.3.0`).
+. Label the PR with the appropriate GitHub labels:
+ * For a new feature or functionality, use `release_note:enhancement`.
+ * For an external-facing fix, use `release_note:fix`. We do not include docs, build, and test fixes in the Release Notes, or unreleased issues that are only on `master`.
+ * For a deprecated feature, use `release_note:deprecation`.
+ * For a breaking change, use `release_note:breaking`.
+ * To **NOT** include your changes in the Release Notes, use `release_note:skip`.
+
+
+include::development-github.asciidoc[]
+
+include::development-tests.asciidoc[]
+
+include::interpreting-ci-failures.asciidoc[]
+
+include::development-documentation.asciidoc[]
+
+include::development-pull-request.asciidoc[]
+
+include::kibana-issue-reporting.asciidoc[]
+
+include::pr-review.asciidoc[]
+
+include::linting.asciidoc[]
diff --git a/docs/developer/testing/interpreting-ci-failures.asciidoc b/docs/developer/contributing/interpreting-ci-failures.asciidoc
similarity index 87%
rename from docs/developer/testing/interpreting-ci-failures.asciidoc
rename to docs/developer/contributing/interpreting-ci-failures.asciidoc
index c47a59217d89b..ba3999a310198 100644
--- a/docs/developer/testing/interpreting-ci-failures.asciidoc
+++ b/docs/developer/contributing/interpreting-ci-failures.asciidoc
@@ -1,19 +1,19 @@
[[interpreting-ci-failures]]
-== Interpreting CI Failures
+=== Interpreting CI Failures
-Kibana CI uses a Jenkins feature called "Pipelines" to automate testing of the code in pull requests and on tracked branches. Pipelines are defined within the repository via the `Jenkinsfile` at the root of the project.
+{kib} CI uses a Jenkins feature called "Pipelines" to automate testing of the code in pull requests and on tracked branches. Pipelines are defined within the repository via the `Jenkinsfile` at the root of the project.
More information about Jenkins Pipelines can be found link:https://jenkins.io/doc/book/pipeline/[in the Jenkins book].
[float]
-=== Github Checks
+==== Github Checks
When a test fails it will be reported to Github via Github Checks. We currently bucket tests into several categories which run in parallel to make CI faster. Groups like `ciGroup{X}` get a single check in Github, and other tests like linting, or type checks, get their own checks.
Clicking the link next to the check in the conversation tab of a pull request will take you to the log output from that section of the tests. If that log output is truncated, or doesn't clearly identify what happened, you can usually get more complete information by visiting Jenkins directly.
[float]
-=== Viewing Job Executions in Jenkins
+==== Viewing Job Executions in Jenkins
To view the results of a job execution in Jenkins, either click the link in the comment left by `@elasticmachine` or search for the `kibana-ci` check in the list at the bottom of the PR. This link will take you to the top-level page for the specific job execution that failed.
@@ -25,7 +25,7 @@ image::images/job_view.png[]
4. *Pipeline Steps:*: A breakdown of the pipline that was executed, along with individual log output for each step in the pipeline.
[float]
-=== Viewing ciGroup/test Logs
+==== Viewing ciGroup/test Logs
To view the logs for a failed specific ciGroup, jest, mocha, type checkers, linters, etc., click on the *Pipeline Steps* link in from the Job page.
diff --git a/docs/developer/contributing/kibana-issue-reporting.asciidoc b/docs/developer/contributing/kibana-issue-reporting.asciidoc
new file mode 100644
index 0000000000000..36c50b612d675
--- /dev/null
+++ b/docs/developer/contributing/kibana-issue-reporting.asciidoc
@@ -0,0 +1,46 @@
+[[kibana-issue-reporting]]
+=== Effective issue reporting in {kib}
+
+[float]
+==== Voicing the importance of an issue
+
+We seriously appreciate thoughtful comments. If an issue is important to
+you, add a comment with a solid write up of your use case and explain
+why it’s so important. Please avoid posting comments comprised solely of
+a thumbs up emoji 👍.
+
+Granted that you share your thoughts, we might even be able to come up
+with creative solutions to your specific problem. If everything you’d
+like to say has already been brought up but you’d still like to add a
+token of support, feel free to add a
+https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments[👍
+thumbs up reaction] on the issue itself and on the comment which best
+summarizes your thoughts.
+
+[float]
+==== "`My issue isn’t getting enough attention`"
+
+First of all, *sorry about that!* We want you to have a great time with
+{kib}.
+
+There are hundreds of open issues, and prioritizing what to work on is an
+important aspect of our daily jobs. We prioritize issues according to
+impact and difficulty, so some issues can be neglected while we work on
+more pressing issues.
+
+Feel free to bump your issues if you think they’ve been neglected for a
+prolonged period.
+
+[float]
+==== "`I want to help!`"
+
+*Now we’re talking*. If you have a bug fix or new feature that you would
+like to contribute to {kib}, please *find or open an issue about it
+before you start working on it.* Talk about what you would like to do.
+It may be that somebody is already working on it, or that there are
+particular issues that you should know about before implementing the
+change.
+
+We enjoy working with contributors to get their code accepted. There are
+many approaches to fixing a problem and it is important to find the best
+approach before writing too much code.
\ No newline at end of file
diff --git a/docs/developer/contributing/linting.asciidoc b/docs/developer/contributing/linting.asciidoc
new file mode 100644
index 0000000000000..234bd90478907
--- /dev/null
+++ b/docs/developer/contributing/linting.asciidoc
@@ -0,0 +1,70 @@
+[[kibana-linting]]
+=== Linting
+
+A note about linting: We use http://eslint.org[eslint] to check that the
+link:STYLEGUIDE.md[styleguide] is being followed. It runs in a
+pre-commit hook and as a part of the tests, but most contributors
+integrate it with their code editors for real-time feedback.
+
+Here are some hints for getting eslint setup in your favorite editor:
+
+[width="100%",cols="13%,87%",options="header",]
+|===
+|Editor |Plugin
+|Sublime
+|https://github.com/roadhump/SublimeLinter-eslint#installation[SublimeLinter-eslint]
+
+|Atom
+|https://github.com/AtomLinter/linter-eslint#installation[linter-eslint]
+
+|VSCode
+|https://marketplace.visualstudio.com/items?itemName=dbaeumer.vscode-eslint[ESLint]
+
+|IntelliJ |Settings » Languages & Frameworks » JavaScript » Code Quality
+Tools » ESLint
+
+|`vi` |https://github.com/scrooloose/syntastic[scrooloose/syntastic]
+|===
+
+Another tool we use for enforcing consistent coding style is
+EditorConfig, which can be set up by installing a plugin in your editor
+that dynamically updates its configuration. Take a look at the
+http://editorconfig.org/#download[EditorConfig] site to find a plugin
+for your editor, and browse our
+https://github.com/elastic/kibana/blob/master/.editorconfig[`.editorconfig`]
+file to see what config rules we set up.
+
+[float]
+==== Setup Guide for VS Code Users
+
+Note that for VSCode, to enable "`live`" linting of TypeScript (and
+other) file types, you will need to modify your local settings, as shown
+below. The default for the ESLint extension is to only lint JavaScript
+file types.
+
+[source,json]
+----
+"eslint.validate": [
+ "javascript",
+ "javascriptreact",
+ { "language": "typescript", "autoFix": true },
+ { "language": "typescriptreact", "autoFix": true }
+]
+----
+
+`eslint` can automatically fix trivial lint errors when you save a
+file by adding this line in your settings.
+
+[source,json]
+----
+ "eslint.autoFixOnSave": true,
+----
+
+WARNING: It is *not* recommended to use the
+https://prettier.io/[`Prettier` extension/IDE plugin] while
+maintaining the {kib} project. Formatting and styling roles are set in
+the multiple `.eslintrc.js` files across the project and some of them
+use the https://www.npmjs.com/package/prettier[NPM version of Prettier].
+Using the IDE extension might cause conflicts, applying the formatting
+to too many files that shouldn’t be prettier-ized and/or highlighting
+errors that are actually OK.
\ No newline at end of file
diff --git a/docs/developer/pr-review.asciidoc b/docs/developer/contributing/pr-review.asciidoc
similarity index 90%
rename from docs/developer/pr-review.asciidoc
rename to docs/developer/contributing/pr-review.asciidoc
index 304718e437dc5..ebab3b24aaaee 100644
--- a/docs/developer/pr-review.asciidoc
+++ b/docs/developer/contributing/pr-review.asciidoc
@@ -1,7 +1,7 @@
[[pr-review]]
-== Pull request review guidelines
+=== Pull request review guidelines
-Every change made to Kibana must be held to a high standard, and while the responsibility for quality in a pull request ultimately lies with the author, Kibana team members have the responsibility as reviewers to verify during their review process.
+Every change made to {kib} must be held to a high standard, and while the responsibility for quality in a pull request ultimately lies with the author, {kib} team members have the responsibility as reviewers to verify during their review process.
Frankly, it's impossible to build a concrete list of requirements that encompass all of the possible situations we'll encounter when reviewing pull requests, so instead this document tries to lay out a common set of the few obvious requirements while also outlining a general philosophy that we should have when approaching any PR review.
@@ -11,15 +11,15 @@ While the review process is always done by Elastic staff members, these guidelin
[float]
-=== Target audience
+==== Target audience
-The target audience for this document are pull request reviewers. For Kibana maintainers, the PR review is the only part of the contributing process in which we have complete control. The author of any given pull request may not be up to speed on the latest expectations we have for pull requests, and they may have never read our guidelines at all. It's our responsibility as reviewers to guide folks through this process, but it's hard to do that consistently without a common set of documented principles.
+The target audience for this document are pull request reviewers. For {kib} maintainers, the PR review is the only part of the contributing process in which we have complete control. The author of any given pull request may not be up to speed on the latest expectations we have for pull requests, and they may have never read our guidelines at all. It's our responsibility as reviewers to guide folks through this process, but it's hard to do that consistently without a common set of documented principles.
Pull request authors can benefit from reading this document as well because it'll help establish a common set of expectations between authors and reviewers early.
[float]
-=== Reject fast
+==== Reject fast
Every pull request is different, and before reviewing any given PR, reviewers should consider the optimal way to approach the PR review so that if the change is ultimately rejected, it is done so as early in the process as possible.
@@ -27,7 +27,7 @@ For example, a reviewer may want to do a product level review as early as possib
[float]
-=== The big three
+==== The big three
There are a lot of discrete requirements and guidelines we want to follow in all of our pull requests, but three things in particular stand out as important above all the rest.
@@ -58,24 +58,24 @@ This isn't simply a question of enough test files. The code in the tests themsel
All of our code should have unit tests that verify its behaviors, including not only the "happy path", but also edge cases, error handling, etc. When you change an existing API of a module, then there should always be at least one failing unit test, which in turn means we need to verify that all code consuming that API properly handles the change if necessary. For modules at a high enough level, this will mean we have breaking change in the product, which we'll need to handle accordingly.
-In addition to extensive unit test coverage, PRs should include relevant functional and integration tests. In some cases, we may simply be testing a programmatic interface (e.g. a service) that is integrating with the file system, the network, Elasticsearch, etc. In other cases, we'll be testing REST APIs over HTTP or comparing screenshots/snapshots with prior known acceptable state. In the worst case, we are doing browser-based functional testing on a running instance of Kibana using selenium.
+In addition to extensive unit test coverage, PRs should include relevant functional and integration tests. In some cases, we may simply be testing a programmatic interface (e.g. a service) that is integrating with the file system, the network, Elasticsearch, etc. In other cases, we'll be testing REST APIs over HTTP or comparing screenshots/snapshots with prior known acceptable state. In the worst case, we are doing browser-based functional testing on a running instance of {kib} using selenium.
Enhancements are pretty much always going to have extensive unit tests as a base as well as functional and integration testing. Bug fixes should always include regression tests to ensure that same bug does not manifest again in the future.
--
[float]
-=== Product level review
+==== Product level review
Reviewers are not simply evaluating the code itself, they are also evaluating the quality of the user-facing change in the product. This generally means they need to check out the branch locally and "play around" with it. In addition to the "do we want this change in the product" details, the reviewer should be looking for bugs and evaluating how approachable and useful the feature is as implemented. Special attention should be given to error scenarios and edge cases to ensure they are all handled well within the product.
[float]
-=== Consistency, style, readability
+==== Consistency, style, readability
Having a relatively consistent codebase is an important part of us building a sustainable project. With dozens of active contributors at any given time, we rely on automation to help ensure consistency - we enforce a comprehensive set of linting rules through CI. We're also rolling out prettier to make this even more automatic.
-For things that can't be easily automated, we maintain a link:https://github.com/elastic/kibana/blob/master/STYLEGUIDE.md[style guide] that authors should adhere to and reviewers should keep in mind when they review a pull request.
+For things that can't be easily automated, we maintain a link:{kib-repo}tree/{branch}/STYLEGUIDE.md[style guide] that authors should adhere to and reviewers should keep in mind when they review a pull request.
Beyond that, we're into subjective territory. Statements like "this isn't very readable" are hardly helpful since they can't be qualified, but that doesn't mean a reviewer should outright ignore code that is hard to understand due to how it is written. There isn't one definitively "best" way to write any particular code, so pursuing such shouldn't be our goal. Instead, reviewers and authors alike must accept that there are likely many different appropriate ways to accomplish the same thing with code, and so long as the contribution is utilizing one of those ways, then we're in good shape.
@@ -87,7 +87,7 @@ There may also be times when a person is inspired by a particular contribution t
[float]
-=== Nitpicking
+==== Nitpicking
Nitpicking is when a reviewer identifies trivial and unimportant details in a pull request and asks the author to change them. This is a completely subjective category that is impossible to define universally, and it's equally impractical to define a blanket policy on nitpicking that everyone will be happy with.
@@ -97,13 +97,13 @@ Often, reviewers have an opinion about whether the feedback they are about to gi
[float]
-=== Handling disagreements
+==== Handling disagreements
Conflicting opinions between reviewers and authors happen, and sometimes it is hard to reconcile those opinions. Ideally folks can work together in the spirit of these guidelines toward a consensus, but if that doesn't work out it may be best to bring a third person into the discussion. Our pull requests generally have two reviewers, so an appropriate third person may already be obvious. Otherwise, reach out to the functional area that is most appropriate or to technical leadership if an area isn't obvious.
[float]
-=== Inappropriate review feedback
+==== Inappropriate review feedback
Whether or not a bit of feedback is appropriate for a pull request is often dependent on the motivation for giving the feedback in the first place.
@@ -113,7 +113,7 @@ Inflammatory feedback such as "this is crap" isn't feedback at all. It's both me
[float]
-=== A checklist
+==== A checklist
Establishing a comprehensive checklist for all of the things that should happen in all possible pull requests is impractical, but that doesn't mean we lack a concrete set of minimum requirements that we can enumerate. The following items should be double checked for any pull request:
diff --git a/docs/developer/core-development.asciidoc b/docs/developer/core-development.asciidoc
deleted file mode 100644
index 8f356abd095f2..0000000000000
--- a/docs/developer/core-development.asciidoc
+++ /dev/null
@@ -1,24 +0,0 @@
-[[core-development]]
-== Core Development
-
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-* <>
-
-include::core/development-basepath.asciidoc[]
-
-include::core/development-dependencies.asciidoc[]
-
-include::core/development-modules.asciidoc[]
-
-include::core/development-elasticsearch.asciidoc[]
-
-include::core/development-unit-tests.asciidoc[]
-
-include::core/development-functional-tests.asciidoc[]
-
-include::core/development-es-snapshots.asciidoc[]
diff --git a/docs/developer/core/development-basepath.asciidoc b/docs/developer/core/development-basepath.asciidoc
deleted file mode 100644
index d49dfe2938fad..0000000000000
--- a/docs/developer/core/development-basepath.asciidoc
+++ /dev/null
@@ -1,85 +0,0 @@
-[[development-basepath]]
-=== Considerations for basePath
-
-All communication from the Kibana UI to the server needs to respect the
-`server.basePath`. Here are the "blessed" strategies for dealing with this
-based on the context:
-
-[float]
-==== Getting a static asset url
-
-Use webpack to import the asset into the build. This will give you a URL in
-JavaScript and gives webpack a chance to perform optimizations and
-cache-busting.
-
-["source","shell"]
------------
-// in plugin/public/main.js
-import uiChrome from 'ui/chrome';
-import logoUrl from 'plugins/facechimp/assets/banner.png';
-
-uiChrome.setBrand({
- logo: `url(${logoUrl}) center no-repeat`
-});
------------
-
-[float]
-==== API requests from the front-end
-
-Use `chrome.addBasePath()` to append the basePath to the front of the url.
-
-["source","shell"]
------------
-import chrome from 'ui/chrome';
-$http.get(chrome.addBasePath('/api/plugin/things'));
------------
-
-[float]
-==== Server side
-
-Append `request.getBasePath()` to any absolute URL path.
-
-["source","shell"]
------------
-const basePath = server.config().get('server.basePath');
-server.route({
- path: '/redirect',
- handler(request, h) {
- return h.redirect(`${request.getBasePath()}/otherLocation`);
- }
-});
------------
-
-[float]
-==== BasePathProxy in dev mode
-
-The Kibana dev server automatically runs behind a proxy with a random
-`server.basePath`. This way developers will be constantly verifying that their
-code works with basePath, while they write it.
-
-To accomplish this the `serve` task does a few things:
-
-1. change the port for the server to the `dev.basePathProxyTarget` setting (default `5603`)
-2. start a `BasePathProxy` at `server.port`
- - picks a random 3-letter value for `randomBasePath`
- - redirects from `/` to `/{randomBasePath}`
- - redirects from `/{any}/app/{appName}` to `/{randomBasePath}/app/{appName}` so that refreshes should work
- - proxies all requests starting with `/{randomBasePath}/` to the Kibana server
-
-If you're writing scripts that interact with the Kibana API, the base path proxy will likely
-make this difficult. To bypass the base path proxy for a single request, prefix urls with
-`__UNSAFE_bypassBasePath` and the request will be routed to the development Kibana server.
-
-["source","shell"]
------------
-curl "http://elastic:changeme@localhost:5601/__UNSAFE_bypassBasePath/api/status"
------------
-
-This proxy can sometimes have unintended side effects in development, so when
-needed you can opt out by passing the `--no-base-path` flag to the `serve` task
-or `yarn start`.
-
-["source","shell"]
------------
-yarn start --no-base-path
------------
diff --git a/docs/developer/core/development-dependencies.asciidoc b/docs/developer/core/development-dependencies.asciidoc
deleted file mode 100644
index 285d338a23a0d..0000000000000
--- a/docs/developer/core/development-dependencies.asciidoc
+++ /dev/null
@@ -1,103 +0,0 @@
-[[development-dependencies]]
-=== Managing Dependencies
-
-While developing plugins for use in the Kibana front-end environment you will
-probably want to include a library or two (at least). While that should be
-simple to do 90% of the time, there are always outliers, and some of those
-outliers are very popular projects.
-
-Before you can use an external library with Kibana you have to install it. You
-do that using...
-
-[float]
-==== yarn (preferred method)
-
-Once you've http://npmsearch.com[found] a dependency you want to add, you can
-install it like so:
-
-["source","shell"]
------------
-yarn add some-neat-library
------------
-
-At the top of a javascript file, just import the library using it's name:
-
-["source","shell"]
------------
-import someNeatLibrary from 'some-neat-library';
------------
-
-Just like working in node.js, front-end code can require node modules installed
-by yarn without any additional configuration.
-
-[float]
-==== webpackShims
-
-When a library you want to use does use es6 or common.js modules but is not
-available with yarn, you can copy the source of the library into a webpackShim.
-
-["source","shell"]
------------
-# create a directory for our new library to live
-mkdir -p webpackShims/some-neat-library
-# download the library you want to use into that directory
-curl https://cdnjs.com/some-neat-library/library.js > webpackShims/some-neat-library/index.js
------------
-
-Then include the library in your JavaScript code as you normally would:
-
-["source","shell"]
------------
-import someNeatLibrary from 'some-neat-library';
------------
-
-[float]
-==== Shimming third party code
-
-Some JavaScript libraries do not declare their dependencies in a way that tools
-like webpack can understand. It is also often the case that libraries do not
-`export` their provided values, but simply write them to a global variable name
-(or something to that effect).
-
-When pulling code like this into Kibana we need to write "shims" that will
-adapt the third party code to work with our application, other libraries, and
-module system. To do this we can utilize the `webpackShims` directory.
-
-The easiest way to explain how to write a shim is to show you some. Here is our
-webpack shim for jQuery:
-
-["source","shell"]
------------
-// webpackShims/jquery.js
-
-module.exports = window.jQuery = window.$ = require('../node_modules/jquery/dist/jquery');
-require('ui/jquery/findTestSubject')(window.$);
------------
-
-This shim is loaded up anytime an `import 'jquery';` statement is found by
-webpack, because of the way that `webpackShims` behaves like `node_modules`.
-When that happens, the shim does two things:
-
-. Assign the exported value of the actual jQuery module to the window at `$` and `jQuery`, allowing libraries like angular to detect that jQuery is available, and use it as the module's export value.
-. Finally, a jQuery plugin that we wrote is included so that every time a file imports jQuery it will get both jQuery and the `$.findTestSubject` helper function.
-
-Here is what our webpack shim for angular looks like:
-
-["source","shell"]
------------
-// webpackShims/angular.js
-
-require('jquery');
-require('../node_modules/angular/angular');
-require('../node_modules/angular-elastic/elastic');
-require('ui/modules').get('kibana', ['monospaced.elastic']);
-module.exports = window.angular;
------------
-
-What this shim does is fairly simple if you go line by line:
-
-. makes sure that jQuery is loaded before angular (which actually runs the shim)
-. load the angular.js file from the node_modules directory
-. load the angular-elastic plugin, a plugin we want to always be included whenever we import angular
-. use the `ui/modules` module to add the module exported by angular-elastic as a dependency to the `kibana` angular module
-. finally, export the window.angular variable. This means that writing `import angular from 'angular';` will properly set the angular variable to the angular library, rather than undefined which is the default behavior.
diff --git a/docs/developer/core/development-elasticsearch.asciidoc b/docs/developer/core/development-elasticsearch.asciidoc
deleted file mode 100644
index 89f85cfc19fbf..0000000000000
--- a/docs/developer/core/development-elasticsearch.asciidoc
+++ /dev/null
@@ -1,40 +0,0 @@
-[[development-elasticsearch]]
-=== Communicating with Elasticsearch
-
-Kibana exposes two clients on the server and browser for communicating with elasticsearch.
-There is an 'admin' client which is used for managing Kibana's state, and a 'data' client for all
-other requests. The clients use the {jsclient-current}/index.html[elasticsearch.js library].
-
-[float]
-[[client-server]]
-=== Server clients
-
-Server clients are exposed through the elasticsearch plugin.
-[source,javascript]
-----
- const adminCluster = server.plugins.elasticsearch.getCluster('admin');
- const dataCluster = server.plugins.elasticsearch.getCluster('data');
-
- //ping as the configured elasticsearch.user in kibana.yml
- adminCluster.callWithInternalUser('ping');
-
- //ping as the user specified in the current requests header
- adminCluster.callWithRequest(req, 'ping');
-----
-
-[float]
-[[client-browser]]
-=== Browser clients
-
-Browser clients are exposed through AngularJS services.
-
-[source,javascript]
-----
-uiModules.get('kibana')
-.run(function (es) {
- es.ping()
- .catch(err => {
- console.log('error pinging servers');
- });
-});
-----
diff --git a/docs/developer/core/development-modules.asciidoc b/docs/developer/core/development-modules.asciidoc
deleted file mode 100644
index cc5cd69ed8cb9..0000000000000
--- a/docs/developer/core/development-modules.asciidoc
+++ /dev/null
@@ -1,63 +0,0 @@
-[[development-modules]]
-=== Modules and Autoloading
-
-[float]
-==== Autoloading
-
-Because of the disconnect between JS modules and angular directives, filters,
-and services it is difficult to know what you need to import. It is even more
-difficult to know if you broke something by removing an import that looked
-unused.
-
-To prevent this from being an issue the ui module provides "autoloading"
-modules. The sole purpose of these modules is to extend the environment with
-certain components. Here is a breakdown of those modules:
-
-- *`import 'ui/autoload/modules'`*
- Imports angular and several ui services and "components" which Kibana
- depends on without importing. The full list of imports is hard coded in the
- module. Hopefully this list will shrink over time as we properly map out
- the required modules and import them were they are actually necessary.
-
-- *`import 'ui/autoload/all'`*
- Imports all of the modules
-
-[float]
-==== Resolving Require Paths
-
-Kibana uses Webpack to bundle Kibana's dependencies.
-
-Here is how import/require statements are resolved to a file:
-
-. Check the beginning of the module path
- * if the path starts with a '.'
- ** append it the directory of the current file
- ** proceed to *3*
- * if the path starts with a '/'
- ** search for this exact path
- ** proceed to *3*
- * proceed to *2*
-. Search for a named module
- * `moduleName` = dirname(require path)`
- * match if `moduleName` is or starts with one of these aliases
- ** replace the alias with the match and continue to ***3***
- * match when any of these conditions are met:
- ** `./webpackShims/${moduleName}` is a directory
- ** `./node_modules/${moduleName}` is a directory
- * if no match was found
- ** move to the parent directory
- ** start again at *2.iii* until reaching the root directory or a match is found
- * if a match was found
- ** replace the `moduleName` prefix from the require statement with the full path of the match and proceed to *3*
-. Search for a file
- * the first of the following paths that resolves to a **file** is our match
- ** path + '.js'
- ** path + '.json'
- ** path
- ** path/${basename(path)} + '.js'
- ** path/${basename(path)} + '.json'
- ** path/${basename(path)}
- ** path/index + '.js'
- ** path/index + '.json'
- ** path/index
- * if none of the paths matches then an error is thrown
diff --git a/docs/developer/getting-started/building-kibana.asciidoc b/docs/developer/getting-started/building-kibana.asciidoc
new file mode 100644
index 0000000000000..e1f1ca336a5da
--- /dev/null
+++ b/docs/developer/getting-started/building-kibana.asciidoc
@@ -0,0 +1,39 @@
+[[building-kibana]]
+=== Building a {kib} distributable
+
+The following commands will build a {kib} production distributable.
+
+[source,bash]
+----
+yarn build --skip-os-packages
+----
+
+You can get all build options using the following command:
+
+[source,bash]
+----
+yarn build --help
+----
+
+[float]
+==== Building OS packages
+
+Packages are built using fpm, dpkg, and rpm. Package building has only been tested on Linux and is not supported on any other platform.
+
+
+[source,bash]
+----
+apt-get install ruby-dev rpm
+gem install fpm -v 1.5.0
+yarn build --skip-archives
+----
+
+To specify a package to build you can add `rpm` or `deb` as an argument.
+
+
+[source,bash]
+----
+yarn build --rpm
+----
+
+Distributable packages can be found in `target/` after the build completes.
\ No newline at end of file
diff --git a/docs/developer/getting-started/debugging.asciidoc b/docs/developer/getting-started/debugging.asciidoc
new file mode 100644
index 0000000000000..b369dcda748af
--- /dev/null
+++ b/docs/developer/getting-started/debugging.asciidoc
@@ -0,0 +1,59 @@
+[[kibana-debugging]]
+=== Debugging {kib}
+
+For information about how to debug unit tests, refer to <>.
+
+[float]
+==== Server Code
+
+`yarn debug` will start the server with Node's inspect flag. {kib}'s development mode will start three processes on ports `9229`, `9230`, and `9231`. Chrome's developer tools need to be configured to connect to all three connections. Add `localhost:` for each {kib} process in Chrome's developer tools connection tab.
+
+[float]
+==== Instrumenting with Elastic APM
+
+{kib} ships with the
+https://github.com/elastic/apm-agent-nodejs[Elastic APM Node.js Agent]
+built-in for debugging purposes.
+
+Its default configuration is meant to be used by core {kib} developers
+only, but it can easily be re-configured to your needs. In its default
+configuration it’s disabled and will, once enabled, send APM data to a
+centrally managed Elasticsearch cluster accessible only to Elastic
+employees.
+
+To change the location where data is sent, use the
+https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#server-url[`serverUrl`]
+APM config option. To activate the APM agent, use the
+https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#active[`active`]
+APM config option.
+
+All config options can be set either via environment variables, or by
+creating an appropriate config file under `config/apm.dev.js`. For
+more information about configuring the APM agent, please refer to
+https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuring-the-agent.html[the
+documentation].
+
+Example `config/apm.dev.js` file:
+
+[source,js]
+----
+module.exports = {
+ active: true,
+};
+----
+
+APM
+https://www.elastic.co/guide/en/apm/agent/rum-js/current/index.html[Real
+User Monitoring agent] is not available in the {kib} distributables,
+however the agent can be enabled by setting `ELASTIC_APM_ACTIVE` to
+`true`.
+
+....
+ELASTIC_APM_ACTIVE=true yarn start
+// activates both Node.js and RUM agent
+....
+
+Once the agent is active, it will trace all incoming HTTP requests to
+{kib}, monitor for errors, and collect process-level metrics. The
+collected data will be sent to the APM Server and is viewable in the APM
+UI in {kib}.
\ No newline at end of file
diff --git a/docs/developer/plugin/development-plugin-resources.asciidoc b/docs/developer/getting-started/development-plugin-resources.asciidoc
similarity index 51%
rename from docs/developer/plugin/development-plugin-resources.asciidoc
rename to docs/developer/getting-started/development-plugin-resources.asciidoc
index 3a32c49e40e0f..dfe8efc4fef57 100644
--- a/docs/developer/plugin/development-plugin-resources.asciidoc
+++ b/docs/developer/getting-started/development-plugin-resources.asciidoc
@@ -5,54 +5,35 @@ Here are some resources that are helpful for getting started with plugin develop
[float]
==== Some light reading
-Our {kib-repo}blob/master/CONTRIBUTING.md[contributing guide] can help you get a development environment going.
+If you haven't already, start with <>. If you are planning to add your plugin to the {kib} repo, read the <> guide, if you are building a plugin externally, read <>. In both cases, read up on our recommended <>.
[float]
-==== Plugin Generator
+==== Creating an empty plugin
-We recommend that you kick-start your plugin by generating it with the {kib-repo}tree/{branch}/packages/kbn-plugin-generator[Kibana Plugin Generator]. Run the following in the Kibana repo, and you will be asked a couple questions, see some progress bars, and have a freshly generated plugin ready for you to play with in Kibana's `plugins` folder.
+You can use the <> to get a basic structure for a new plugin. Plugins that are not part of the
+{kib} repo should be developed inside the `plugins` folder. If you are building a new plugin to check in to the {kib} repo,
+you will choose between a few locations:
-["source","shell"]
------------
-node scripts/generate_plugin my_plugin_name # replace "my_plugin_name" with your desired plugin name
------------
-
-
-[float]
-==== Directory structure for plugins
-
-The Kibana directory must be named `kibana`, and your plugin directory should be located in the root of `kibana` in a `plugins` directory, for example:
-
-["source","shell"]
-----
-.
-└── kibana
- └── plugins
- ├── foo-plugin
- └── bar-plugin
-----
-
-[float]
-==== References in the code
- - {kib-repo}blob/{branch}/src/legacy/server/plugins/lib/plugin.js[Plugin class]: What options does the `kibana.Plugin` class accept?
- - <>: What type of exports are available?
+ - {kib-repo}tree/{branch}/x-pack/plugins[x-pack/plugins] for commercially licensed plugins
+ - {kib-repo}tree/{branch}/src/plugins[src/plugins] for open source licensed plugins
+ - {kib-repo}tree/{branch}/examples[examples] for developer example plugins (these will not be included in the distributables)
[float]
==== Elastic UI Framework
If you're developing a plugin that has a user interface, take a look at our https://elastic.github.io/eui[Elastic UI Framework].
-It documents the CSS and React components we use to build Kibana's user interface.
+It documents the CSS and React components we use to build {kib}'s user interface.
You're welcome to use these components, but be aware that they are rapidly evolving, and we might introduce breaking changes that will disrupt your plugin's UI.
[float]
==== TypeScript Support
-Plugin code can be written in http://www.typescriptlang.org/[TypeScript] if desired.
+We recommend your plugin code is written in http://www.typescriptlang.org/[TypeScript].
To enable TypeScript support, create a `tsconfig.json` file at the root of your plugin that looks something like this:
["source","js"]
-----------
{
- // extend Kibana's tsconfig, or use your own settings
+ // extend {kib}'s tsconfig, or use your own settings
"extends": "../../kibana/tsconfig.json",
// tell the TypeScript compiler where to find your source files
@@ -64,10 +45,17 @@ To enable TypeScript support, create a `tsconfig.json` file at the root of your
-----------
TypeScript code is automatically converted into JavaScript during development,
-but not in the distributable version of Kibana. If you use the
-{kib-repo}blob/{branch}/packages/kbn-plugin-helpers[@kbn/plugin-helpers] to build your plugin, then your `.ts` and `.tsx` files will be permanently transpiled before your plugin is archived. If you have your own build process, make sure to run the TypeScript compiler on your source files and ship the compilation output so that your plugin will work with the distributable version of Kibana.
+but not in the distributable version of {kib}. If you use the
+{kib-repo}blob/{branch}/packages/kbn-plugin-helpers[@kbn/plugin-helpers] to build your plugin, then your `.ts` and `.tsx` files will be permanently transpiled before your plugin is archived. If you have your own build process, make sure to run the TypeScript compiler on your source files and ship the compilation output so that your plugin will work with the distributable version of {kib}.
+[float]
==== {kib} platform migration guide
{kib-repo}blob/{branch}/src/core/MIGRATION.md#migrating-legacy-plugins-to-the-new-platform[This guide]
provides an action plan for moving a legacy plugin to the new platform.
+
+[float]
+==== Externally developed plugins
+
+If you are building a plugin outside of the {kib} repo, read <>.
+
diff --git a/docs/developer/getting-started/index.asciidoc b/docs/developer/getting-started/index.asciidoc
new file mode 100644
index 0000000000000..ff1623e22f1eb
--- /dev/null
+++ b/docs/developer/getting-started/index.asciidoc
@@ -0,0 +1,144 @@
+[[development-getting-started]]
+== Getting started
+
+Get started building your own plugins, or contributing directly to the {kib} repo.
+
+[float]
+[[get-kibana-code]]
+=== Get the code
+
+https://help.github.com/en/github/getting-started-with-github/fork-a-repo[Fork], then https://help.github.com/en/github/getting-started-with-github/fork-a-repo#step-2-create-a-local-clone-of-your-fork[clone] the {kib-repo}[{kib} repo] and change directory into it:
+
+[source,bash]
+----
+git clone https://github.com/[YOUR_USERNAME]/kibana.git kibana
+cd kibana
+----
+
+[float]
+=== Install dependencies
+
+Install the version of Node.js listed in the `.node-version` file. This
+can be automated with tools such as
+https://github.com/creationix/nvm[nvm],
+https://github.com/coreybutler/nvm-windows[nvm-windows] or
+https://github.com/wbyoung/avn[avn]. As we also include a `.nvmrc` file
+you can switch to the correct version when using nvm by running:
+
+[source,bash]
+----
+nvm use
+----
+
+Install the latest version of https://yarnpkg.com[yarn].
+
+Bootstrap {kib} and install all the dependencies:
+
+[source,bash]
+----
+yarn kbn bootstrap
+----
+
+____
+Node.js native modules could be in use and node-gyp is the tool used to
+build them. There are tools you need to install per platform and python
+versions you need to be using. Please see
+https://github.com/nodejs/node-gyp#installation[https://github.com/nodejs/node-gyp#installation]
+and follow the guide according your platform.
+____
+
+(You can also run `yarn kbn` to see the other available commands. For
+more info about this tool, see
+{kib-repo}tree/{branch}/packages/kbn-pm[{kib-repo}tree/{branch}/packages/kbn-pm].)
+
+When switching branches which use different versions of npm packages you
+may need to run:
+
+[source,bash]
+----
+yarn kbn clean
+----
+
+If you have failures during `yarn kbn bootstrap` you may have some
+corrupted packages in your yarn cache which you can clean with:
+
+[source,bash]
+----
+yarn cache clean
+----
+
+[float]
+=== Configure environmental settings
+
+[[increase-nodejs-heap-size]]
+[float]
+==== Increase node.js heap size
+
+{kib} is a big project and for some commands it can happen that the
+process hits the default heap limit and crashes with an out-of-memory
+error. If you run into this problem, you can increase maximum heap size
+by setting the `--max_old_space_size` option on the command line. To set
+the limit for all commands, simply add the following line to your shell
+config: `export NODE_OPTIONS="--max_old_space_size=2048"`.
+
+[float]
+=== Run Elasticsearch
+
+Run the latest Elasticsearch snapshot. Specify an optional license with the `--license` flag.
+
+[source,bash]
+----
+yarn es snapshot --license trial
+----
+
+`trial` will give you access to all capabilities.
+
+Read about more options for <>, like connecting to a remote host, running from source,
+preserving data in between runs, running a remote cluster, etc.
+
+[float]
+=== Run {kib}
+
+In another terminal window, start up {kib}. Include developer examples by adding an optional `--run-examples` flag.
+
+[source,bash]
+----
+yarn start --run-examples
+----
+
+View all available options by running `yarn start --help`
+
+Read about more advanced options for <>.
+
+[float]
+=== Code away!
+
+You are now ready to start developing. Changes to your files should be picked up automatically. Server side changes will
+cause the {kib} server to reboot.
+
+[float]
+=== More information
+
+* <>
+
+* <>
+
+* <>
+
+* <>
+
+* <>
+
+* <>
+
+include::running-kibana-advanced.asciidoc[]
+
+include::sample-data.asciidoc[]
+
+include::debugging.asciidoc[]
+
+include::sass.asciidoc[]
+
+include::building-kibana.asciidoc[]
+
+include::development-plugin-resources.asciidoc[]
\ No newline at end of file
diff --git a/docs/developer/getting-started/running-kibana-advanced.asciidoc b/docs/developer/getting-started/running-kibana-advanced.asciidoc
new file mode 100644
index 0000000000000..e36f38de1b366
--- /dev/null
+++ b/docs/developer/getting-started/running-kibana-advanced.asciidoc
@@ -0,0 +1,87 @@
+[[running-kibana-advanced]]
+=== Running {kib}
+
+Change to your local {kib} directory. Start the development server.
+
+[source,bash]
+----
+yarn start
+----
+
+____
+On Windows, you’ll need to use Git Bash, Cygwin, or a similar shell that
+exposes the `sh` command. And to successfully build you’ll need Cygwin
+optional packages zip, tar, and shasum.
+____
+
+Now you can point your web browser to http://localhost:5601 and start
+using {kib}! When running `yarn start`, {kib} will also log that it
+is listening on port 5603 due to the base path proxy, but you should
+still access {kib} on port 5601.
+
+By default, you can log in with username `elastic` and password
+`changeme`. See the `--help` options on `yarn es <command>` if
+you’d like to configure a different password.
+
+[float]
+==== Running {kib} in Open-Source mode
+
+If you’re looking to only work with the open-source software, supply the
+license type to `yarn es`:
+
+[source,bash]
+----
+yarn es snapshot --license oss
+----
+
+And start {kib} with only open-source code:
+
+[source,bash]
+----
+yarn start --oss
+----
+
+[float]
+==== Unsupported URL Type
+
+If you’re installing dependencies and seeing an error that looks
+something like
+
+....
+Unsupported URL Type: link:packages/eslint-config-kibana
+....
+
+you’re likely running `npm`. To install dependencies in {kib} you
+need to run `yarn kbn bootstrap`. For more info, see
+link:#setting-up-your-development-environment[Setting Up Your
+Development Environment] above.
+
+[float]
+[[customize-kibana-yml]]
+==== Customizing `config/kibana.dev.yml`
+
+The `config/kibana.yml` file stores user configuration directives.
+Since this file is checked into source control, however, developer
+preferences can’t be saved without the risk of accidentally committing
+the modified version. To make customizing configuration easier during
+development, the {kib} CLI will look for a `config/kibana.dev.yml`
+file if run with the `--dev` flag. This file behaves just like the
+non-dev version and accepts any of the
+https://www.elastic.co/guide/en/kibana/current/settings.html[standard
+settings].
+
+[float]
+==== Potential Optimization Pitfalls
+
+* Webpack is trying to include a file in the bundle that I deleted and
+is now complaining about it is missing
+* A module id that used to resolve to a single file now resolves to a
+directory, but webpack isn’t adapting
+* (if you discover other scenarios, please send a PR!)
+
+[float]
+==== Setting Up SSL
+
+{kib} includes self-signed certificates that can be used for
+development purposes in the browser and for communicating with
+Elasticsearch: `yarn start --ssl` & `yarn es snapshot --ssl`.
\ No newline at end of file
diff --git a/docs/developer/getting-started/sample-data.asciidoc b/docs/developer/getting-started/sample-data.asciidoc
new file mode 100644
index 0000000000000..376211ceb2634
--- /dev/null
+++ b/docs/developer/getting-started/sample-data.asciidoc
@@ -0,0 +1,31 @@
+[[sample-data]]
+=== Installing sample data
+
+There are a couple ways to easily get data ingested into Elasticsearch.
+
+[float]
+==== Sample data packages available for one click installation
+
+The easiest is to install one or more of our available sample data packages. If you have no data, you should be
+prompted to install when running {kib} for the first time. You can also access and install the sample data packages
+by going to the home page and clicking "add sample data".
+
+[float]
+==== makelogs script
+
+The provided `makelogs` script will generate sample data.
+
+[source,bash]
+----
+node scripts/makelogs --auth <username>:<password>
+----
+
+The default username and password combination are `elastic:changeme`
+
+Make sure to execute `node scripts/makelogs` *after* elasticsearch is up and running!
+
+[float]
+==== CSV upload
+
+If running with a platinum or trial license, you can also use the CSV uploader provided inside the Machine learning app.
+Navigate to the Data visualizer to upload your data from a file.
\ No newline at end of file
diff --git a/docs/developer/getting-started/sass.asciidoc b/docs/developer/getting-started/sass.asciidoc
new file mode 100644
index 0000000000000..194e001f642e1
--- /dev/null
+++ b/docs/developer/getting-started/sass.asciidoc
@@ -0,0 +1,36 @@
+[[kibana-sass]]
+=== Styling with SASS
+
+When writing a new component, create a sibling SASS file of the same
+name and import directly into the JS/TS component file. Doing so ensures
+the styles are never separated or lost on import and allows for better
+modularization (smaller individual plugin asset footprint).
+
+All SASS (.scss) files will automatically build with the
+https://elastic.github.io/eui/#/guidelines/sass[EUI] & {kib} invisibles (SASS variables, mixins, functions) from
+the {kib-repo}tree/{branch}/src/legacy/ui/public/styles/_globals_v7light.scss[globals_THEME.scss] file.
+
+*Example:*
+
+[source,tsx]
+----
+// component.tsx
+
+import './component.scss';
+
+export const Component = () => {
+ return (
+
+ );
+}
+----
+
+[source,scss]
+----
+// component.scss
+
+.plgComponent { ... }
+----
+
+Do not use the underscore `_` SASS file naming pattern when importing
+directly into a javascript file.
\ No newline at end of file
diff --git a/docs/developer/index.asciidoc b/docs/developer/index.asciidoc
index 50e41a4e18207..db57815a1285a 100644
--- a/docs/developer/index.asciidoc
+++ b/docs/developer/index.asciidoc
@@ -3,25 +3,27 @@
[partintro]
--
-Contributing to Kibana can be daunting at first, but it doesn't have to be. If
-you're planning a pull request to the Kibana repository, you may want to start
-with <>.
+Contributing to {kib} can be daunting at first, but it doesn't have to be. The following sections should get you up and
+running in no time. If you have any problems, file an issue in the https://github.com/elastic/kibana/issues[Kibana repo].
-If you'd prefer to use Kibana's internal plugin API, then check out
-<>.
---
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
-include::core-development.asciidoc[]
+--
-include::plugin-development.asciidoc[]
+include::getting-started/index.asciidoc[]
-include::visualize/development-visualize-index.asciidoc[]
+include::best-practices/index.asciidoc[]
-include::add-data-guide.asciidoc[]
+include::architecture/index.asciidoc[]
-include::security/index.asciidoc[]
+include::contributing/index.asciidoc[]
-include::pr-review.asciidoc[]
+include::plugin/index.asciidoc[]
-include::testing/interpreting-ci-failures.asciidoc[]
+include::advanced/index.asciidoc[]
diff --git a/docs/developer/plugin-development.asciidoc b/docs/developer/plugin-development.asciidoc
deleted file mode 100644
index 691fdb0412fd2..0000000000000
--- a/docs/developer/plugin-development.asciidoc
+++ /dev/null
@@ -1,24 +0,0 @@
-[[plugin-development]]
-== Plugin Development
-
-[IMPORTANT]
-==============================================
-The Kibana plugin interfaces are in a state of constant development. We cannot provide backwards compatibility for plugins due to the high rate of change. Kibana enforces that the installed plugins match the version of Kibana itself. Plugin developers will have to release a new version of their plugin for each new Kibana release as a result.
-==============================================
-
-* <>
-* <>
-* <>
-* <>
-* <>
-
-include::plugin/development-plugin-resources.asciidoc[]
-
-include::plugin/development-uiexports.asciidoc[]
-
-include::plugin/development-plugin-feature-registration.asciidoc[]
-
-include::plugin/development-plugin-functional-tests.asciidoc[]
-
-include::plugin/development-plugin-localization.asciidoc[]
-
diff --git a/docs/developer/plugin/development-uiexports.asciidoc b/docs/developer/plugin/development-uiexports.asciidoc
deleted file mode 100644
index 18d326cbfb9c0..0000000000000
--- a/docs/developer/plugin/development-uiexports.asciidoc
+++ /dev/null
@@ -1,16 +0,0 @@
-[[development-uiexports]]
-=== UI Exports
-
-An aggregate list of available UiExport types:
-
-[cols="> docs are the best place to
+start. However, there are a few differences when developing plugins outside the {kib} repo. These differences are covered here.
+
+[float]
+[[automatic-plugin-generator]]
+==== Automatic plugin generator
+
+We recommend that you kick-start your plugin by generating it with the {kib-repo}tree/{branch}/packages/kbn-plugin-generator[Kibana Plugin Generator]. Run the following in the {kib} repo, and you will be asked a couple questions, see some progress bars, and have a freshly generated plugin ready for you to play with in {kib}'s `plugins` folder.
+
+["source","shell"]
+-----------
+node scripts/generate_plugin my_plugin_name # replace "my_plugin_name" with your desired plugin name
+-----------
+
+[float]
+=== Plugin location
+
+The {kib} directory must be named `kibana`, and your plugin directory should be located in the root of `kibana` in a `plugins` directory, for example:
+
+["source","shell"]
+----
+.
+└── kibana
+ └── plugins
+ ├── foo-plugin
+ └── bar-plugin
+----
+
+* <>
+* <>
+
+include::external-plugin-functional-tests.asciidoc[]
+
+include::external-plugin-localization.asciidoc[]
diff --git a/docs/developer/security/index.asciidoc b/docs/developer/security/index.asciidoc
deleted file mode 100644
index e7ef0b85930e4..0000000000000
--- a/docs/developer/security/index.asciidoc
+++ /dev/null
@@ -1,12 +0,0 @@
-[[development-security]]
-== Security
-
-Kibana has generally been able to implement security transparently to core and plugin developers, and this largely remains the case. {kib} on two methods that the <>'s `Cluster` provides: `callWithRequest` and `callWithInternalUser`.
-
-`callWithRequest` executes requests against Elasticsearch using the authentication credentials of the Kibana end-user. So, if you log into Kibana with the user of `foo` when `callWithRequest` is used, {kib} execute the request against Elasticsearch as the user `foo`. Historically, `callWithRequest` has been used extensively to perform actions that are initiated at the request of Kibana end-users.
-
-`callWithInternalUser` executes requests against Elasticsearch using the internal Kibana server user, and has historically been used for performing actions that aren't initiated by Kibana end users; for example, creating the initial `.kibana` index or performing health checks against Elasticsearch.
-
-However, with the changes that role-based access control (RBAC) introduces, this is no longer cut and dry. {kib} now requires all access to the `.kibana` index goes through the `SavedObjectsClient`. This used to be a best practice, as the `SavedObjectsClient` was responsible for translating the documents stored in Elasticsearch to and from Saved Objects, but RBAC is now taking advantage of this abstraction to implement access control and determine when to use `callWithRequest` versus `callWithInternalUser`.
-
-include::rbac.asciidoc[]
diff --git a/docs/maps/connect-to-ems.asciidoc b/docs/maps/connect-to-ems.asciidoc
index 2b88ffe2e2dda..45ced2e64aa73 100644
--- a/docs/maps/connect-to-ems.asciidoc
+++ b/docs/maps/connect-to-ems.asciidoc
@@ -19,7 +19,7 @@ Maps makes requests directly from the browser to EMS.
To connect to EMS when your Kibana server and browser are in an internal network:
. Set `map.proxyElasticMapsServiceInMaps` to `true` in your <> file to proxy EMS requests through the Kibana server.
-. Update your firewall rules to whitelist connections from your Kibana server to the EMS domains.
+. Update your firewall rules to allow connections from your Kibana server to the EMS domains.
NOTE: Coordinate map and region map visualizations do not support `map.proxyElasticMapsServiceInMaps` and will not proxy EMS requests through the Kibana server.
diff --git a/docs/migration/migrate_8_0.asciidoc b/docs/migration/migrate_8_0.asciidoc
index 82798e948822a..b80503750a26e 100644
--- a/docs/migration/migrate_8_0.asciidoc
+++ b/docs/migration/migrate_8_0.asciidoc
@@ -115,12 +115,17 @@ URL that it derived from the actual server address and `xpack.security.public` s
*Impact:* Any workflow that involved manually clearing generated bundles will have to be updated with the new path.
+[float]
+=== kibana.keystore has moved from the `data` folder to the `config` folder
+*Details:* By default, kibana.keystore has moved from the configured `path.data` folder to `/config` for archive distributions
+and `/etc/kibana` for package distributions. If a pre-existing keystore exists in the data directory that path will continue to be used.
+
[float]
[[breaking_80_user_role_changes]]
=== User role changes
[float]
-==== `kibana_user` role has been removed and `kibana_admin` has been added.
+=== `kibana_user` role has been removed and `kibana_admin` has been added.
*Details:* The `kibana_user` role has been removed and `kibana_admin` has been added to better
reflect its intended use. This role continues to grant all access to every
diff --git a/docs/setup/production.asciidoc b/docs/setup/production.asciidoc
index 72f275e237490..afb4b37df6a28 100644
--- a/docs/setup/production.asciidoc
+++ b/docs/setup/production.asciidoc
@@ -167,9 +167,9 @@ These can be used to automatically update the list of hosts as a cluster is resi
Kibana has a default maximum memory limit of 1.4 GB, and in most cases, we recommend leaving this unconfigured. In some scenarios, such as large reporting jobs,
it may make sense to tweak limits to meet more specific requirements.
-You can modify this limit by setting `--max-old-space-size` in the `NODE_OPTIONS` environment variable. For deb and rpm, packages this is passed in via `/etc/default/kibana` and can be appended to the bottom of the file.
+You can modify this limit by setting `--max-old-space-size` in the `node.options` config file that can be found inside `kibana/config` folder or any other configured with the environment variable `KIBANA_PATH_CONF` (for example in debian based system would be `/etc/kibana`).
The option accepts a limit in MB:
--------
-NODE_OPTIONS="--max-old-space-size=2048" bin/kibana
+--max-old-space-size=2048
--------
diff --git a/package.json b/package.json
index d58da61047d28..7ab6bfb91a376 100644
--- a/package.json
+++ b/package.json
@@ -67,7 +67,7 @@
"uiFramework:documentComponent": "cd packages/kbn-ui-framework && yarn documentComponent",
"kbn:watch": "node scripts/kibana --dev --logging.json=false",
"build:types": "tsc --p tsconfig.types.json",
- "docs:acceptApiChanges": "node --max-old-space-size=6144 scripts/check_published_api_changes.js --accept",
+ "docs:acceptApiChanges": "node --max-old-space-size=6144 scripts/check_published_api_changes.js --accept",
"kbn:bootstrap": "node scripts/register_git_hook",
"spec_to_console": "node scripts/spec_to_console",
"backport-skip-ci": "backport --prDescription \"[skip-ci]\"",
@@ -256,7 +256,6 @@
"redux-actions": "^2.6.5",
"redux-thunk": "^2.3.0",
"regenerator-runtime": "^0.13.3",
- "regression": "2.0.1",
"request": "^2.88.0",
"require-in-the-middle": "^5.0.2",
"reselect": "^4.0.0",
@@ -408,7 +407,7 @@
"babel-eslint": "^10.0.3",
"babel-jest": "^25.5.1",
"babel-plugin-istanbul": "^6.0.0",
- "backport": "5.4.6",
+ "backport": "5.5.1",
"chai": "3.5.0",
"chance": "1.0.18",
"cheerio": "0.22.0",
diff --git a/scripts/backport.js b/scripts/backport.js
index 2094534e2c4b3..dca5912cfb133 100644
--- a/scripts/backport.js
+++ b/scripts/backport.js
@@ -18,5 +18,10 @@
*/
require('../src/setup_node_env/node_version_validator');
+var process = require('process');
+
+// forward command line args to backport
+var args = process.argv.slice(2);
+
var backport = require('backport');
-backport.run();
+backport.run({}, args);
diff --git a/src/cli_keystore/cli_keystore.js b/src/cli_keystore/cli_keystore.js
index e1561b343ef39..d12c80b361c92 100644
--- a/src/cli_keystore/cli_keystore.js
+++ b/src/cli_keystore/cli_keystore.js
@@ -18,20 +18,16 @@
*/
import _ from 'lodash';
-import { join } from 'path';
import { pkg } from '../core/server/utils';
import Command from '../cli/command';
-import { getDataPath } from '../core/server/path';
import { Keystore } from '../legacy/server/keystore';
-const path = join(getDataPath(), 'kibana.keystore');
-const keystore = new Keystore(path);
-
import { createCli } from './create';
import { listCli } from './list';
import { addCli } from './add';
import { removeCli } from './remove';
+import { getKeystore } from './get_keystore';
const argv = process.env.kbnWorkerArgv
? JSON.parse(process.env.kbnWorkerArgv)
@@ -42,6 +38,8 @@ program
.version(pkg.version)
.description('A tool for managing settings stored in the Kibana keystore');
+const keystore = new Keystore(getKeystore());
+
createCli(program, keystore);
listCli(program, keystore);
addCli(program, keystore);
diff --git a/src/cli_keystore/get_keystore.js b/src/cli_keystore/get_keystore.js
new file mode 100644
index 0000000000000..c8ff2555563ad
--- /dev/null
+++ b/src/cli_keystore/get_keystore.js
@@ -0,0 +1,40 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { existsSync } from 'fs';
+import { join } from 'path';
+
+import Logger from '../cli_plugin/lib/logger';
+import { getConfigDirectory, getDataPath } from '../core/server/path';
+
+export function getKeystore() {
+ const configKeystore = join(getConfigDirectory(), 'kibana.keystore');
+ const dataKeystore = join(getDataPath(), 'kibana.keystore');
+ let keystorePath = null;
+ if (existsSync(dataKeystore)) {
+ const logger = new Logger();
+ logger.log(
+ `kibana.keystore located in the data folder is deprecated. Future versions will use the config folder.`
+ );
+ keystorePath = dataKeystore;
+ } else {
+ keystorePath = configKeystore;
+ }
+ return keystorePath;
+}
diff --git a/src/cli_keystore/get_keystore.test.js b/src/cli_keystore/get_keystore.test.js
new file mode 100644
index 0000000000000..88102b8f51d57
--- /dev/null
+++ b/src/cli_keystore/get_keystore.test.js
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { getKeystore } from './get_keystore';
+import Logger from '../cli_plugin/lib/logger';
+import fs from 'fs';
+import sinon from 'sinon';
+
+describe('get_keystore', () => {
+ const sandbox = sinon.createSandbox();
+
+ beforeEach(() => {
+ sandbox.stub(Logger.prototype, 'log');
+ });
+
+ afterEach(() => {
+ sandbox.restore();
+ });
+
+ it('uses the config directory if there is no pre-existing keystore', () => {
+ sandbox.stub(fs, 'existsSync').returns(false);
+ expect(getKeystore()).toContain('config');
+ expect(getKeystore()).not.toContain('data');
+ });
+
+ it('uses the data directory if there is a pre-existing keystore in the data directory', () => {
+ sandbox.stub(fs, 'existsSync').returns(true);
+ expect(getKeystore()).toContain('data');
+ expect(getKeystore()).not.toContain('config');
+ });
+
+ it('logs a deprecation warning if the data directory is used', () => {
+ sandbox.stub(fs, 'existsSync').returns(true);
+ getKeystore();
+ sandbox.assert.calledOnce(Logger.prototype.log);
+ sandbox.assert.calledWith(
+ Logger.prototype.log,
+ 'kibana.keystore located in the data folder is deprecated. Future versions will use the config folder.'
+ );
+ });
+});
diff --git a/src/core/server/elasticsearch/legacy/cluster_client.test.ts b/src/core/server/elasticsearch/legacy/cluster_client.test.ts
index 2f0f80728c707..fd57d06e61eee 100644
--- a/src/core/server/elasticsearch/legacy/cluster_client.test.ts
+++ b/src/core/server/elasticsearch/legacy/cluster_client.test.ts
@@ -130,7 +130,7 @@ describe('#callAsInternalUser', () => {
expect(mockEsClientInstance.security.authenticate).toHaveBeenLastCalledWith(mockParams);
});
- test('does not wrap errors if `wrap401Errors` is not set', async () => {
+ test('does not wrap errors if `wrap401Errors` is set to `false`', async () => {
const mockError = { message: 'some error' };
mockEsClientInstance.ping.mockRejectedValue(mockError);
@@ -146,7 +146,7 @@ describe('#callAsInternalUser', () => {
).rejects.toBe(mockAuthenticationError);
});
- test('wraps only 401 errors by default or when `wrap401Errors` is set', async () => {
+ test('wraps 401 errors when `wrap401Errors` is set to `true` or unspecified', async () => {
const mockError = { message: 'some error' };
mockEsClientInstance.ping.mockRejectedValue(mockError);
diff --git a/src/core/server/http/integration_tests/core_service.test.mocks.ts b/src/core/server/http/integration_tests/core_service.test.mocks.ts
index f7ebd18b9c488..c23724b7d332f 100644
--- a/src/core/server/http/integration_tests/core_service.test.mocks.ts
+++ b/src/core/server/http/integration_tests/core_service.test.mocks.ts
@@ -19,10 +19,9 @@
import { elasticsearchServiceMock } from '../../elasticsearch/elasticsearch_service.mock';
export const clusterClientMock = jest.fn();
+export const clusterClientInstanceMock = elasticsearchServiceMock.createLegacyScopedClusterClient();
jest.doMock('../../elasticsearch/legacy/scoped_cluster_client', () => ({
- LegacyScopedClusterClient: clusterClientMock.mockImplementation(function () {
- return elasticsearchServiceMock.createLegacyScopedClusterClient();
- }),
+ LegacyScopedClusterClient: clusterClientMock.mockImplementation(() => clusterClientInstanceMock),
}));
jest.doMock('elasticsearch', () => {
diff --git a/src/core/server/http/integration_tests/core_services.test.ts b/src/core/server/http/integration_tests/core_services.test.ts
index ba39effa77016..0ee53a04d9f87 100644
--- a/src/core/server/http/integration_tests/core_services.test.ts
+++ b/src/core/server/http/integration_tests/core_services.test.ts
@@ -16,9 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
+
+import { clusterClientMock, clusterClientInstanceMock } from './core_service.test.mocks';
+
import Boom from 'boom';
import { Request } from 'hapi';
-import { clusterClientMock } from './core_service.test.mocks';
+import { errors as esErrors } from 'elasticsearch';
+import { LegacyElasticsearchErrorHelpers } from '../../elasticsearch/legacy';
import * as kbnTestServer from '../../../../test_utils/kbn_server';
@@ -352,7 +356,7 @@ describe('http service', () => {
});
});
});
- describe('elasticsearch', () => {
+ describe('legacy elasticsearch client', () => {
let root: ReturnType;
beforeEach(async () => {
root = kbnTestServer.createRoot({ plugins: { initialize: false } });
@@ -410,5 +414,31 @@ describe('http service', () => {
const [, , clientHeaders] = client;
expect(clientHeaders).toEqual({ authorization: authorizationHeader });
});
+
+ it('forwards 401 errors returned from elasticsearch', async () => {
+ const { http } = await root.setup();
+ const { createRouter } = http;
+
+ const authenticationError = LegacyElasticsearchErrorHelpers.decorateNotAuthorizedError(
+ new (esErrors.AuthenticationException as any)('Authentication Exception', {
+ body: { error: { header: { 'WWW-Authenticate': 'authenticate header' } } },
+ statusCode: 401,
+ })
+ );
+
+ clusterClientInstanceMock.callAsCurrentUser.mockRejectedValue(authenticationError);
+
+ const router = createRouter('/new-platform');
+ router.get({ path: '/', validate: false }, async (context, req, res) => {
+ await context.core.elasticsearch.legacy.client.callAsCurrentUser('ping');
+ return res.ok();
+ });
+
+ await root.start();
+
+ const response = await kbnTestServer.request.get(root, '/new-platform/').expect(401);
+
+ expect(response.header['www-authenticate']).toEqual('authenticate header');
+ });
});
});
diff --git a/src/core/server/http/router/router.ts b/src/core/server/http/router/router.ts
index 69402a74eda5f..35eec746163ce 100644
--- a/src/core/server/http/router/router.ts
+++ b/src/core/server/http/router/router.ts
@@ -22,6 +22,7 @@ import Boom from 'boom';
import { isConfigSchema } from '@kbn/config-schema';
import { Logger } from '../../logging';
+import { LegacyElasticsearchErrorHelpers } from '../../elasticsearch/legacy/errors';
import { KibanaRequest } from './request';
import { KibanaResponseFactory, kibanaResponseFactory, IKibanaResponse } from './response';
import { RouteConfig, RouteConfigOptions, RouteMethod, validBodyOutput } from './route';
@@ -263,6 +264,10 @@ export class Router implements IRouter {
return hapiResponseAdapter.handle(kibanaResponse);
} catch (e) {
this.log.error(e);
+ // forward 401 (boom) error from ES
+ if (LegacyElasticsearchErrorHelpers.isNotAuthorizedError(e)) {
+ return e;
+ }
return hapiResponseAdapter.toInternalError();
}
}
diff --git a/src/core/server/path/index.test.ts b/src/core/server/path/index.test.ts
index 048622e1f7eab..522e100d85e5d 100644
--- a/src/core/server/path/index.test.ts
+++ b/src/core/server/path/index.test.ts
@@ -18,7 +18,7 @@
*/
import { accessSync, constants } from 'fs';
-import { getConfigPath, getDataPath } from './';
+import { getConfigPath, getDataPath, getConfigDirectory } from './';
describe('Default path finder', () => {
it('should find a kibana.yml', () => {
@@ -30,4 +30,9 @@ describe('Default path finder', () => {
const dataPath = getDataPath();
expect(() => accessSync(dataPath, constants.R_OK)).not.toThrow();
});
+
+ it('should find a config directory', () => {
+ const configDirectory = getConfigDirectory();
+ expect(() => accessSync(configDirectory, constants.R_OK)).not.toThrow();
+ });
});
diff --git a/src/core/server/path/index.ts b/src/core/server/path/index.ts
index 2e05e3856bd4c..1bb650518c47a 100644
--- a/src/core/server/path/index.ts
+++ b/src/core/server/path/index.ts
@@ -30,6 +30,10 @@ const CONFIG_PATHS = [
fromRoot('config/kibana.yml'),
].filter(isString);
+const CONFIG_DIRECTORIES = [process.env.KIBANA_PATH_CONF, fromRoot('config'), '/etc/kibana'].filter(
+ isString
+);
+
const DATA_PATHS = [
process.env.DATA_PATH, // deprecated
fromRoot('data'),
@@ -49,12 +53,19 @@ function findFile(paths: string[]) {
}
/**
- * Get the path where the config files are stored
+ * Get the path of kibana.yml
* @internal
*/
export const getConfigPath = () => findFile(CONFIG_PATHS);
+
+/**
+ * Get the directory containing configuration files
+ * @internal
+ */
+export const getConfigDirectory = () => findFile(CONFIG_DIRECTORIES);
+
/**
- * Get the path where the data can be stored
+ * Get the directory containing runtime data
* @internal
*/
export const getDataPath = () => findFile(DATA_PATHS);
diff --git a/src/dev/build/tasks/bin/scripts/kibana b/src/dev/build/tasks/bin/scripts/kibana
index 558facb9da32b..3283e17008e7c 100755
--- a/src/dev/build/tasks/bin/scripts/kibana
+++ b/src/dev/build/tasks/bin/scripts/kibana
@@ -14,6 +14,7 @@ while [ -h "$SCRIPT" ] ; do
done
DIR="$(dirname "${SCRIPT}")/.."
+CONFIG_DIR=${KIBANA_PATH_CONF:-"$DIR/config"}
NODE="${DIR}/node/bin/node"
test -x "$NODE"
if [ ! -x "$NODE" ]; then
@@ -21,4 +22,8 @@ if [ ! -x "$NODE" ]; then
exit 1
fi
-NODE_OPTIONS="--no-warnings --max-http-header-size=65536 ${NODE_OPTIONS}" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli" ${@}
+if [ -f "${CONFIG_DIR}/node.options" ]; then
+ KBN_NODE_OPTS="$(grep -v ^# < ${CONFIG_DIR}/node.options | xargs)"
+fi
+
+NODE_OPTIONS="--no-warnings --max-http-header-size=65536 $KBN_NODE_OPTS $NODE_OPTIONS" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli" ${@}
diff --git a/src/dev/build/tasks/bin/scripts/kibana-keystore b/src/dev/build/tasks/bin/scripts/kibana-keystore
index 43800c7b895d3..f83df118d24e8 100755
--- a/src/dev/build/tasks/bin/scripts/kibana-keystore
+++ b/src/dev/build/tasks/bin/scripts/kibana-keystore
@@ -14,6 +14,7 @@ while [ -h "$SCRIPT" ] ; do
done
DIR="$(dirname "${SCRIPT}")/.."
+CONFIG_DIR=${KIBANA_PATH_CONF:-"$DIR/config"}
NODE="${DIR}/node/bin/node"
test -x "$NODE"
if [ ! -x "$NODE" ]; then
@@ -21,4 +22,8 @@ if [ ! -x "$NODE" ]; then
exit 1
fi
-"${NODE}" "${DIR}/src/cli_keystore" "$@"
+if [ -f "${CONFIG_DIR}/node.options" ]; then
+ KBN_NODE_OPTS="$(grep -v ^# < ${CONFIG_DIR}/node.options | xargs)"
+fi
+
+NODE_OPTIONS="$KBN_NODE_OPTS $NODE_OPTIONS" "${NODE}" "${DIR}/src/cli_keystore" "$@"
diff --git a/src/dev/build/tasks/bin/scripts/kibana-keystore.bat b/src/dev/build/tasks/bin/scripts/kibana-keystore.bat
old mode 100644
new mode 100755
index b8311db2cfae5..389eb5bf488e4
--- a/src/dev/build/tasks/bin/scripts/kibana-keystore.bat
+++ b/src/dev/build/tasks/bin/scripts/kibana-keystore.bat
@@ -1,6 +1,6 @@
@echo off
-SETLOCAL
+SETLOCAL ENABLEDELAYEDEXPANSION
set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI
@@ -12,6 +12,21 @@ If Not Exist "%NODE%" (
Exit /B 1
)
+set CONFIG_DIR=%KIBANA_PATH_CONF%
+If [%KIBANA_PATH_CONF%] == [] (
+ set CONFIG_DIR=%DIR%\config
+)
+
+IF EXIST "%CONFIG_DIR%\node.options" (
+ for /F "eol=# tokens=*" %%i in (%CONFIG_DIR%\node.options) do (
+ If [!NODE_OPTIONS!] == [] (
+ set "NODE_OPTIONS=%%i"
+ ) Else (
+ set "NODE_OPTIONS=!NODE_OPTIONS! %%i"
+ )
+ )
+)
+
TITLE Kibana Keystore
"%NODE%" "%DIR%\src\cli_keystore" %*
diff --git a/src/dev/build/tasks/bin/scripts/kibana-plugin b/src/dev/build/tasks/bin/scripts/kibana-plugin
index b843d4966c6d1..f1102e1ef5a32 100755
--- a/src/dev/build/tasks/bin/scripts/kibana-plugin
+++ b/src/dev/build/tasks/bin/scripts/kibana-plugin
@@ -14,6 +14,7 @@ while [ -h "$SCRIPT" ] ; do
done
DIR="$(dirname "${SCRIPT}")/.."
+CONFIG_DIR=${KIBANA_PATH_CONF:-"$DIR/config"}
NODE="${DIR}/node/bin/node"
test -x "$NODE"
if [ ! -x "$NODE" ]; then
@@ -21,4 +22,8 @@ if [ ! -x "$NODE" ]; then
exit 1
fi
-NODE_OPTIONS="--no-warnings ${NODE_OPTIONS}" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli_plugin" "$@"
+if [ -f "${CONFIG_DIR}/node.options" ]; then
+ KBN_NODE_OPTS="$(grep -v ^# < ${CONFIG_DIR}/node.options | xargs)"
+fi
+
+NODE_OPTIONS="--no-warnings $KBN_NODE_OPTS $NODE_OPTIONS" NODE_ENV=production exec "${NODE}" "${DIR}/src/cli_plugin" "$@"
diff --git a/src/dev/build/tasks/bin/scripts/kibana-plugin.bat b/src/dev/build/tasks/bin/scripts/kibana-plugin.bat
index bf382a0657ade..6815b1b9eab8c 100755
--- a/src/dev/build/tasks/bin/scripts/kibana-plugin.bat
+++ b/src/dev/build/tasks/bin/scripts/kibana-plugin.bat
@@ -1,6 +1,6 @@
@echo off
-SETLOCAL
+SETLOCAL ENABLEDELAYEDEXPANSION
set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI
@@ -13,9 +13,26 @@ If Not Exist "%NODE%" (
Exit /B 1
)
-TITLE Kibana Server
+set CONFIG_DIR=%KIBANA_PATH_CONF%
+If [%KIBANA_PATH_CONF%] == [] (
+ set CONFIG_DIR=%DIR%\config
+)
+
+IF EXIST "%CONFIG_DIR%\node.options" (
+ for /F "eol=# tokens=*" %%i in (%CONFIG_DIR%\node.options) do (
+ If [!NODE_OPTIONS!] == [] (
+ set "NODE_OPTIONS=%%i"
+ ) Else (
+ set "NODE_OPTIONS=!NODE_OPTIONS! %%i"
+ )
+ )
+)
+
+:: Include pre-defined node options
+set "NODE_OPTIONS=--no-warnings %NODE_OPTIONS%"
-set "NODE_OPTIONS=--no-warnings %NODE_OPTIONS%" && "%NODE%" "%DIR%\src\cli_plugin" %*
+TITLE Kibana Server
+"%NODE%" "%DIR%\src\cli_plugin" %*
:finally
diff --git a/src/dev/build/tasks/bin/scripts/kibana.bat b/src/dev/build/tasks/bin/scripts/kibana.bat
index 9d8ba359e53af..d3edc92f110a5 100755
--- a/src/dev/build/tasks/bin/scripts/kibana.bat
+++ b/src/dev/build/tasks/bin/scripts/kibana.bat
@@ -1,6 +1,6 @@
@echo off
-SETLOCAL
+SETLOCAL ENABLEDELAYEDEXPANSION
set SCRIPT_DIR=%~dp0
for %%I in ("%SCRIPT_DIR%..") do set DIR=%%~dpfI
@@ -14,7 +14,27 @@ If Not Exist "%NODE%" (
Exit /B 1
)
-set "NODE_OPTIONS=--no-warnings --max-http-header-size=65536 %NODE_OPTIONS%" && "%NODE%" "%DIR%\src\cli" %*
+set CONFIG_DIR=%KIBANA_PATH_CONF%
+If [%KIBANA_PATH_CONF%] == [] (
+ set CONFIG_DIR=%DIR%\config
+)
+
+IF EXIST "%CONFIG_DIR%\node.options" (
+ for /F "eol=# tokens=*" %%i in (%CONFIG_DIR%\node.options) do (
+ If [!NODE_OPTIONS!] == [] (
+ set "NODE_OPTIONS=%%i"
+ ) Else (
+ set "NODE_OPTIONS=!NODE_OPTIONS! %%i"
+ )
+ )
+)
+
+:: Include pre-defined node options
+set "NODE_OPTIONS=--no-warnings --max-http-header-size=65536 %NODE_OPTIONS%"
+
+:: This should run independently as the last instruction
+:: as we need NODE_OPTIONS previously set to expand
+"%NODE%" "%DIR%\src\cli" %*
:finally
diff --git a/src/dev/build/tasks/copy_source_task.js b/src/dev/build/tasks/copy_source_task.js
index 32eb7bf8712e3..e34f05bd6cfff 100644
--- a/src/dev/build/tasks/copy_source_task.js
+++ b/src/dev/build/tasks/copy_source_task.js
@@ -43,6 +43,7 @@ export const CopySourceTask = {
'typings/**',
'webpackShims/**',
'config/kibana.yml',
+ 'config/node.options',
'tsconfig*.json',
'.i18nrc.json',
'kibana.d.ts',
diff --git a/src/dev/build/tasks/os_packages/package_scripts/post_install.sh b/src/dev/build/tasks/os_packages/package_scripts/post_install.sh
index 9cf08ea38254d..10f11ff51874e 100644
--- a/src/dev/build/tasks/os_packages/package_scripts/post_install.sh
+++ b/src/dev/build/tasks/os_packages/package_scripts/post_install.sh
@@ -1,6 +1,8 @@
#!/bin/sh
set -e
+export KBN_PATH_CONF=${KBN_PATH_CONF:-<%= configDir %>}
+
case $1 in
# Debian
configure)
@@ -35,4 +37,10 @@ case $1 in
esac
chown -R <%= user %>:<%= group %> <%= dataDir %>
-chown <%= user %>:<%= group %> <%= pluginsDir %>
+chmod 2750 <%= dataDir %>
+chmod -R 2755 <%= dataDir %>/*
+
+chown :<%= group %> ${KBN_PATH_CONF}
+chown :<%= group %> ${KBN_PATH_CONF}/kibana.yml
+chmod 2750 ${KBN_PATH_CONF}
+chmod 660 ${KBN_PATH_CONF}/kibana.yml
diff --git a/src/dev/build/tasks/os_packages/service_templates/sysv/etc/init.d/kibana b/src/dev/build/tasks/os_packages/service_templates/sysv/etc/init.d/kibana
index d935dc6e31f80..8facbb709cc5c 100755
--- a/src/dev/build/tasks/os_packages/service_templates/sysv/etc/init.d/kibana
+++ b/src/dev/build/tasks/os_packages/service_templates/sysv/etc/init.d/kibana
@@ -39,7 +39,7 @@ emit() {
start() {
[ ! -d "/var/log/kibana/" ] && mkdir "/var/log/kibana/"
chown "$user":"$group" "/var/log/kibana/"
- chmod 755 "/var/log/kibana/"
+ chmod 2750 "/var/log/kibana/"
[ ! -d "/var/run/kibana/" ] && mkdir "/var/run/kibana/"
chown "$user":"$group" "/var/run/kibana/"
diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md
index 01fcefe27df3e..b532bacf5df25 100644
--- a/src/plugins/data/public/public.api.md
+++ b/src/plugins/data/public/public.api.md
@@ -1991,7 +1991,7 @@ export const UI_SETTINGS: {
// src/plugins/data/public/index.ts:393:1 - (ae-forgotten-export) The symbol "parseInterval" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:394:1 - (ae-forgotten-export) The symbol "propFilter" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/index.ts:397:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts
-// src/plugins/data/public/query/state_sync/connect_to_query_state.ts:40:60 - (ae-forgotten-export) The symbol "FilterStateStore" needs to be exported by the entry point index.d.ts
+// src/plugins/data/public/query/state_sync/connect_to_query_state.ts:41:60 - (ae-forgotten-export) The symbol "FilterStateStore" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/types.ts:52:5 - (ae-forgotten-export) The symbol "createFiltersFromValueClickAction" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/types.ts:53:5 - (ae-forgotten-export) The symbol "createFiltersFromRangeSelectAction" needs to be exported by the entry point index.d.ts
// src/plugins/data/public/types.ts:61:5 - (ae-forgotten-export) The symbol "IndexPatternSelectProps" needs to be exported by the entry point index.d.ts
diff --git a/src/plugins/data/public/query/state_sync/connect_to_query_state.ts b/src/plugins/data/public/query/state_sync/connect_to_query_state.ts
index e74497a5053b4..2e62dac87f6ef 100644
--- a/src/plugins/data/public/query/state_sync/connect_to_query_state.ts
+++ b/src/plugins/data/public/query/state_sync/connect_to_query_state.ts
@@ -24,6 +24,7 @@ import { BaseStateContainer } from '../../../../kibana_utils/public';
import { QuerySetup, QueryStart } from '../query_service';
import { QueryState, QueryStateChange } from './types';
import { FilterStateStore, COMPARE_ALL_OPTIONS, compareFilters } from '../../../common';
+import { validateTimeRange } from '../timefilter';
/**
* Helper to setup two-way syncing of global data and a state container
@@ -159,9 +160,9 @@ export const connectToQueryState = (
// cloneDeep is required because services are mutating passed objects
// and state in state container is frozen
if (syncConfig.time) {
- const time = state.time || timefilter.getTimeDefaults();
+ const time = validateTimeRange(state.time) ? state.time : timefilter.getTimeDefaults();
if (!_.isEqual(time, timefilter.getTime())) {
- timefilter.setTime(_.cloneDeep(time));
+ timefilter.setTime(_.cloneDeep(time!));
}
}
diff --git a/src/plugins/data/public/query/timefilter/index.ts b/src/plugins/data/public/query/timefilter/index.ts
index f71061677ceb7..19386c10ab59f 100644
--- a/src/plugins/data/public/query/timefilter/index.ts
+++ b/src/plugins/data/public/query/timefilter/index.ts
@@ -24,3 +24,4 @@ export { Timefilter, TimefilterContract } from './timefilter';
export { TimeHistory, TimeHistoryContract } from './time_history';
export { changeTimeFilter, convertRangeFilterToTimeRangeString } from './lib/change_time_filter';
export { extractTimeFilter } from './lib/extract_time_filter';
+export { validateTimeRange } from './lib/validate_timerange';
diff --git a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/legacy.ts b/src/plugins/data/public/query/timefilter/lib/validate_timerange.test.ts
similarity index 50%
rename from src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/legacy.ts
rename to src/plugins/data/public/query/timefilter/lib/validate_timerange.test.ts
index 216afe5920408..e20849c21a717 100644
--- a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/legacy.ts
+++ b/src/plugins/data/public/query/timefilter/lib/validate_timerange.test.ts
@@ -16,24 +16,37 @@
* specific language governing permissions and limitations
* under the License.
*/
-import { PluginInitializerContext } from 'kibana/public';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { npStart, npSetup } from 'ui/new_platform';
-import {
- TableVisPlugin,
- TablePluginSetupDependencies,
- // eslint-disable-next-line @kbn/eslint/no-restricted-paths
-} from '../../../../../../plugins/vis_type_table/public/plugin';
-const plugins: Readonly = {
- expressions: npSetup.plugins.expressions,
- visualizations: npSetup.plugins.visualizations,
-};
+import { validateTimeRange } from './validate_timerange';
-const pluginInstance = new TableVisPlugin({} as PluginInitializerContext);
+describe('Validate timerange', () => {
+ test('Validate no range', () => {
+ const ok = validateTimeRange();
-export const setup = pluginInstance.setup(npSetup.core, plugins);
-export const start = pluginInstance.start(npStart.core, {
- data: npStart.plugins.data,
- kibanaLegacy: npStart.plugins.kibanaLegacy,
+ expect(ok).toBe(false);
+ });
+ test('normal range', () => {
+ const ok = validateTimeRange({
+ to: 'now',
+ from: 'now-7d',
+ });
+
+ expect(ok).toBe(true);
+ });
+ test('bad from time', () => {
+ const ok = validateTimeRange({
+ to: 'nowa',
+ from: 'now-7d',
+ });
+
+ expect(ok).toBe(false);
+ });
+ test('bad to time', () => {
+ const ok = validateTimeRange({
+ to: 'now',
+ from: 'nowa-7d',
+ });
+
+ expect(ok).toBe(false);
+ });
});
diff --git a/src/plugins/data/public/query/timefilter/lib/validate_timerange.ts b/src/plugins/data/public/query/timefilter/lib/validate_timerange.ts
new file mode 100644
index 0000000000000..f9e4aa0ae1cab
--- /dev/null
+++ b/src/plugins/data/public/query/timefilter/lib/validate_timerange.ts
@@ -0,0 +1,28 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import dateMath from '@elastic/datemath';
+import { TimeRange } from '../../../../common';
+
+export function validateTimeRange(time?: TimeRange): boolean {
+ if (!time) return false;
+ const momentDateFrom = dateMath.parse(time.from);
+ const momentDateTo = dateMath.parse(time.to);
+ return !!(momentDateFrom && momentDateFrom.isValid() && momentDateTo && momentDateTo.isValid());
+}
diff --git a/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts b/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts
index 8e862b5692ca3..e9b4629ba88cf 100644
--- a/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts
+++ b/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.test.ts
@@ -316,6 +316,83 @@ describe('Terms Agg Other bucket helper', () => {
}
});
+ test('excludes exists filter for scripted fields', () => {
+ const aggConfigs = getAggConfigs(nestedTerm.aggs);
+ aggConfigs.aggs[1].params.field.scripted = true;
+ const agg = buildOtherBucketAgg(
+ aggConfigs,
+ aggConfigs.aggs[1] as IBucketAggConfig,
+ nestedTermResponse
+ );
+ const expectedResponse = {
+ 'other-filter': {
+ aggs: undefined,
+ filters: {
+ filters: {
+ '-IN': {
+ bool: {
+ must: [],
+ filter: [{ match_phrase: { 'geo.src': 'IN' } }],
+ should: [],
+ must_not: [
+ {
+ script: {
+ script: {
+ lang: undefined,
+ params: { value: 'ios' },
+ source: '(undefined) == value',
+ },
+ },
+ },
+ {
+ script: {
+ script: {
+ lang: undefined,
+ params: { value: 'win xp' },
+ source: '(undefined) == value',
+ },
+ },
+ },
+ ],
+ },
+ },
+ '-US': {
+ bool: {
+ must: [],
+ filter: [{ match_phrase: { 'geo.src': 'US' } }],
+ should: [],
+ must_not: [
+ {
+ script: {
+ script: {
+ lang: undefined,
+ params: { value: 'ios' },
+ source: '(undefined) == value',
+ },
+ },
+ },
+ {
+ script: {
+ script: {
+ lang: undefined,
+ params: { value: 'win xp' },
+ source: '(undefined) == value',
+ },
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+ },
+ };
+ expect(agg).toBeDefined();
+ if (agg) {
+ expect(agg()).toEqual(expectedResponse);
+ }
+ });
+
test('returns false when nested terms agg has no buckets', () => {
const aggConfigs = getAggConfigs(nestedTerm.aggs);
const agg = buildOtherBucketAgg(
diff --git a/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts b/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts
index fba3d35f002af..1a7deafb548ae 100644
--- a/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts
+++ b/src/plugins/data/public/search/aggs/buckets/_terms_other_bucket_helper.ts
@@ -202,10 +202,12 @@ export const buildOtherBucketAgg = (
return;
}
- if (
- !aggWithOtherBucket.params.missingBucket ||
- agg.buckets.some((bucket: { key: string }) => bucket.key === '__missing__')
- ) {
+ const hasScriptedField = !!aggWithOtherBucket.params.field.scripted;
+ const hasMissingBucket = !!aggWithOtherBucket.params.missingBucket;
+ const hasMissingBucketKey = agg.buckets.some(
+ (bucket: { key: string }) => bucket.key === '__missing__'
+ );
+ if (!hasScriptedField && (!hasMissingBucket || hasMissingBucketKey)) {
filters.push(
buildExistsFilter(
aggWithOtherBucket.params.field,
diff --git a/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.test.ts b/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.test.ts
index 12cdf13caeb55..e2caca7895c42 100644
--- a/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.test.ts
+++ b/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.test.ts
@@ -177,11 +177,27 @@ describe('Filter editor utils', () => {
it('should return true for range filter with from/to', () => {
const isValid = isFilterValid(stubIndexPattern, stubFields[0], isBetweenOperator, {
from: 'foo',
- too: 'goo',
+ to: 'goo',
});
expect(isValid).toBe(true);
});
+ it('should return false for date range filter with bad from', () => {
+ const isValid = isFilterValid(stubIndexPattern, stubFields[4], isBetweenOperator, {
+ from: 'foo',
+ to: 'now',
+ });
+ expect(isValid).toBe(false);
+ });
+
+ it('should return false for date range filter with bad to', () => {
+ const isValid = isFilterValid(stubIndexPattern, stubFields[4], isBetweenOperator, {
+ from: '2020-01-01',
+ to: 'mau',
+ });
+ expect(isValid).toBe(false);
+ });
+
it('should return true for exists filter without params', () => {
const isValid = isFilterValid(stubIndexPattern, stubFields[0], existsOperator);
expect(isValid).toBe(true);
diff --git a/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.ts b/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.ts
index 114be67e490cf..97a59fa69f458 100644
--- a/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.ts
+++ b/src/plugins/data/public/ui/filter_bar/filter_editor/lib/filter_editor_utils.ts
@@ -85,7 +85,10 @@ export function isFilterValid(
if (typeof params !== 'object') {
return false;
}
- return validateParams(params.from, field.type) || validateParams(params.to, field.type);
+ return (
+ (!params.from || validateParams(params.from, field.type)) &&
+ (!params.to || validateParams(params.to, field.type))
+ );
case 'exists':
return true;
default:
diff --git a/src/plugins/data/public/ui/filter_bar/filter_editor/range_value_input.tsx b/src/plugins/data/public/ui/filter_bar/filter_editor/range_value_input.tsx
index 65b842f0bd4aa..bdfd1014625d8 100644
--- a/src/plugins/data/public/ui/filter_bar/filter_editor/range_value_input.tsx
+++ b/src/plugins/data/public/ui/filter_bar/filter_editor/range_value_input.tsx
@@ -17,8 +17,9 @@
* under the License.
*/
-import { EuiIcon, EuiLink, EuiFormHelpText, EuiFormControlLayoutDelimited } from '@elastic/eui';
-import { FormattedMessage, InjectedIntl, injectI18n } from '@kbn/i18n/react';
+import moment from 'moment';
+import { EuiFormControlLayoutDelimited } from '@elastic/eui';
+import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
import { get } from 'lodash';
import React from 'react';
import { useKibana } from '../../../../../kibana_react/public';
@@ -41,8 +42,17 @@ interface Props {
function RangeValueInputUI(props: Props) {
const kibana = useKibana();
- const dataMathDocLink = kibana.services.docLinks!.links.date.dateMath;
const type = props.field ? props.field.type : 'string';
+ const tzConfig = kibana.services.uiSettings!.get('dateFormat:tz');
+
+ const formatDateChange = (value: string | number | boolean) => {
+ if (typeof value !== 'string' && typeof value !== 'number') return value;
+
+ const momentParsedValue = moment(value).tz(tzConfig);
+ if (momentParsedValue.isValid()) return momentParsedValue?.format('YYYY-MM-DDTHH:mm:ss.SSSZ');
+
+ return value;
+ };
const onFromChange = (value: string | number | boolean) => {
if (typeof value !== 'string' && typeof value !== 'number') {
@@ -71,6 +81,9 @@ function RangeValueInputUI(props: Props) {
type={type}
value={props.value ? props.value.from : undefined}
onChange={onFromChange}
+ onBlur={(value) => {
+ onFromChange(formatDateChange(value));
+ }}
placeholder={props.intl.formatMessage({
id: 'data.filter.filterEditor.rangeStartInputPlaceholder',
defaultMessage: 'Start of the range',
@@ -83,6 +96,9 @@ function RangeValueInputUI(props: Props) {
type={type}
value={props.value ? props.value.to : undefined}
onChange={onToChange}
+ onBlur={(value) => {
+ onToChange(formatDateChange(value));
+ }}
placeholder={props.intl.formatMessage({
id: 'data.filter.filterEditor.rangeEndInputPlaceholder',
defaultMessage: 'End of the range',
@@ -90,19 +106,6 @@ function RangeValueInputUI(props: Props) {
/>
}
/>
- {type === 'date' ? (
-
-
- {' '}
-
-
-
- ) : (
- ''
- )}
);
}
diff --git a/src/plugins/data/public/ui/filter_bar/filter_editor/value_input_type.tsx b/src/plugins/data/public/ui/filter_bar/filter_editor/value_input_type.tsx
index 3737dae1bf9ef..1a165c78d4d79 100644
--- a/src/plugins/data/public/ui/filter_bar/filter_editor/value_input_type.tsx
+++ b/src/plugins/data/public/ui/filter_bar/filter_editor/value_input_type.tsx
@@ -27,6 +27,7 @@ interface Props {
value?: string | number;
type: string;
onChange: (value: string | number | boolean) => void;
+ onBlur?: (value: string | number | boolean) => void;
placeholder: string;
intl: InjectedIntl;
controlOnly?: boolean;
@@ -66,6 +67,7 @@ class ValueInputTypeUI extends Component {
placeholder={this.props.placeholder}
value={value}
onChange={this.onChange}
+ onBlur={this.onBlur}
isInvalid={!isEmpty(value) && !validateParams(value, this.props.type)}
controlOnly={this.props.controlOnly}
className={this.props.className}
@@ -126,6 +128,13 @@ class ValueInputTypeUI extends Component {
const params = event.target.value;
this.props.onChange(params);
};
+
+ private onBlur = (event: React.ChangeEvent) => {
+ if (this.props.onBlur) {
+ const params = event.target.value;
+ this.props.onBlur(params);
+ }
+ };
}
export const ValueInputType = injectI18n(ValueInputTypeUI);
diff --git a/src/plugins/discover/public/application/embeddable/index.ts b/src/plugins/discover/public/application/embeddable/index.ts
index b86a8daa119c5..1c4e06c7c3ade 100644
--- a/src/plugins/discover/public/application/embeddable/index.ts
+++ b/src/plugins/discover/public/application/embeddable/index.ts
@@ -20,4 +20,3 @@
export { SEARCH_EMBEDDABLE_TYPE } from './constants';
export * from './types';
export * from './search_embeddable_factory';
-export * from './search_embeddable';
diff --git a/src/plugins/discover/public/application/embeddable/search_embeddable.ts b/src/plugins/discover/public/application/embeddable/search_embeddable.ts
index e03a6b938bc4f..9a3dd0d310ff7 100644
--- a/src/plugins/discover/public/application/embeddable/search_embeddable.ts
+++ b/src/plugins/discover/public/application/embeddable/search_embeddable.ts
@@ -38,7 +38,7 @@ import * as columnActions from '../angular/doc_table/actions/columns';
import searchTemplate from './search_template.html';
import { ISearchEmbeddable, SearchInput, SearchOutput } from './types';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
-import { getSortForSearchSource } from '../angular/doc_table/lib/get_sort_for_search_source';
+import { getSortForSearchSource } from '../angular/doc_table';
import {
getRequestInspectorStats,
getResponseInspectorStats,
diff --git a/src/plugins/discover/public/application/embeddable/search_embeddable_factory.ts b/src/plugins/discover/public/application/embeddable/search_embeddable_factory.ts
index 1dc5947792d5c..f61fa361f0c0e 100644
--- a/src/plugins/discover/public/application/embeddable/search_embeddable_factory.ts
+++ b/src/plugins/discover/public/application/embeddable/search_embeddable_factory.ts
@@ -28,8 +28,8 @@ import {
} from '../../../../embeddable/public';
import { TimeRange } from '../../../../data/public';
-import { SearchEmbeddable } from './search_embeddable';
-import { SearchInput, SearchOutput } from './types';
+
+import { SearchInput, SearchOutput, SearchEmbeddable } from './types';
import { SEARCH_EMBEDDABLE_TYPE } from './constants';
interface StartServices {
@@ -92,7 +92,8 @@ export class SearchEmbeddableFactory
const savedObject = await getServices().getSavedSearchById(savedObjectId);
const indexPattern = savedObject.searchSource.getField('index');
const { executeTriggerActions } = await this.getStartServices();
- return new SearchEmbeddable(
+ const { SearchEmbeddable: SearchEmbeddableClass } = await import('./search_embeddable');
+ return new SearchEmbeddableClass(
{
savedSearch: savedObject,
$rootScope,
diff --git a/src/plugins/discover/public/application/embeddable/types.ts b/src/plugins/discover/public/application/embeddable/types.ts
index 80576eb4ed7cb..d7fa9b3bc23d3 100644
--- a/src/plugins/discover/public/application/embeddable/types.ts
+++ b/src/plugins/discover/public/application/embeddable/types.ts
@@ -17,7 +17,12 @@
* under the License.
*/
-import { EmbeddableInput, EmbeddableOutput, IEmbeddable } from 'src/plugins/embeddable/public';
+import {
+ Embeddable,
+ EmbeddableInput,
+ EmbeddableOutput,
+ IEmbeddable,
+} from 'src/plugins/embeddable/public';
import { SortOrder } from '../angular/doc_table/components/table_header/helpers';
import { Filter, IIndexPattern, TimeRange, Query } from '../../../../data/public';
import { SavedSearch } from '../..';
@@ -40,3 +45,7 @@ export interface SearchOutput extends EmbeddableOutput {
export interface ISearchEmbeddable extends IEmbeddable {
getSavedSearch(): SavedSearch;
}
+
+export interface SearchEmbeddable extends Embeddable {
+ type: string;
+}
diff --git a/src/plugins/discover/public/plugin.ts b/src/plugins/discover/public/plugin.ts
index e97ac783c616f..20e13d204e0e9 100644
--- a/src/plugins/discover/public/plugin.ts
+++ b/src/plugins/discover/public/plugin.ts
@@ -66,6 +66,7 @@ import {
DISCOVER_APP_URL_GENERATOR,
DiscoverUrlGenerator,
} from './url_generator';
+import { SearchEmbeddableFactory } from './application/embeddable';
declare module '../../share/public' {
export interface UrlGeneratorStateMapping {
@@ -345,12 +346,7 @@ export class DiscoverPlugin
/**
* register embeddable with a slimmer embeddable version of inner angular
*/
- private async registerEmbeddable(
- core: CoreSetup,
- plugins: DiscoverSetupPlugins
- ) {
- const { SearchEmbeddableFactory } = await import('./application/embeddable');
-
+ private registerEmbeddable(core: CoreSetup, plugins: DiscoverSetupPlugins) {
if (!this.getEmbeddableInjector) {
throw Error('Discover plugin method getEmbeddableInjector is undefined');
}
diff --git a/src/plugins/kibana_usage_collection/server/collectors/find_all.ts b/src/plugins/kibana_usage_collection/server/collectors/find_all.ts
index e6363551eba9c..5bb4f20b5c5b1 100644
--- a/src/plugins/kibana_usage_collection/server/collectors/find_all.ts
+++ b/src/plugins/kibana_usage_collection/server/collectors/find_all.ts
@@ -28,7 +28,7 @@ export async function findAll(
savedObjectsClient: ISavedObjectsRepository,
opts: SavedObjectsFindOptions
): Promise>> {
- const { page = 1, perPage = 100, ...options } = opts;
+ const { page = 1, perPage = 10000, ...options } = opts;
const { saved_objects: savedObjects, total } = await savedObjectsClient.find({
...options,
page,
diff --git a/src/plugins/ui_actions/public/context_menu/open_context_menu.test.ts b/src/plugins/ui_actions/public/context_menu/open_context_menu.test.ts
new file mode 100644
index 0000000000000..77ce04ba24b35
--- /dev/null
+++ b/src/plugins/ui_actions/public/context_menu/open_context_menu.test.ts
@@ -0,0 +1,84 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { createInteractionPositionTracker } from './open_context_menu';
+import { fireEvent } from '@testing-library/dom';
+
+let targetEl: Element;
+const top = 100;
+const left = 100;
+const right = 200;
+const bottom = 200;
+beforeEach(() => {
+ targetEl = document.createElement('div');
+ jest.spyOn(targetEl, 'getBoundingClientRect').mockImplementation(() => ({
+ top,
+ left,
+ right,
+ bottom,
+ width: right - left,
+ height: bottom - top,
+ x: left,
+ y: top,
+ toJSON: () => {},
+ }));
+ document.body.append(targetEl);
+});
+afterEach(() => {
+ targetEl.remove();
+});
+
+test('should use last clicked element position if mouse position is outside target element', () => {
+ const { resolveLastPosition } = createInteractionPositionTracker();
+
+ fireEvent.click(targetEl, { clientX: 0, clientY: 0 });
+ const { x, y } = resolveLastPosition();
+
+ expect(y).toBe(bottom);
+ expect(x).toBe(left + (right - left) / 2);
+});
+
+test('should use mouse position if mouse inside clicked element', () => {
+ const { resolveLastPosition } = createInteractionPositionTracker();
+
+ const mouseX = 150;
+ const mouseY = 150;
+ fireEvent.click(targetEl, { clientX: mouseX, clientY: mouseY });
+
+ const { x, y } = resolveLastPosition();
+
+  expect(y).toBe(mouseY);
+  expect(x).toBe(mouseX);
+});
+
+test('should use position of previous element, if latest element is no longer in DOM', () => {
+ const { resolveLastPosition } = createInteractionPositionTracker();
+
+ const detachedElement = document.createElement('div');
+ const spy = jest.spyOn(detachedElement, 'getBoundingClientRect');
+
+ fireEvent.click(targetEl);
+ fireEvent.click(detachedElement);
+
+ const { x, y } = resolveLastPosition();
+
+ expect(y).toBe(bottom);
+ expect(x).toBe(left + (right - left) / 2);
+ expect(spy).not.toBeCalled();
+});
diff --git a/src/plugins/ui_actions/public/context_menu/open_context_menu.tsx b/src/plugins/ui_actions/public/context_menu/open_context_menu.tsx
index 5892c184f8a81..0d9a4c7be5670 100644
--- a/src/plugins/ui_actions/public/context_menu/open_context_menu.tsx
+++ b/src/plugins/ui_actions/public/context_menu/open_context_menu.tsx
@@ -26,14 +26,86 @@ import ReactDOM from 'react-dom';
let activeSession: ContextMenuSession | null = null;
const CONTAINER_ID = 'contextMenu-container';
-let initialized = false;
+/**
+ * Tries to find best position for opening context menu using mousemove and click event
+ * Returned position is relative to document
+ */
+export function createInteractionPositionTracker() {
+ let lastMouseX = 0;
+ let lastMouseY = 0;
+ const lastClicks: Array<{ el?: Element; mouseX: number; mouseY: number }> = [];
+ const MAX_LAST_CLICKS = 10;
+
+ /**
+ * Track both `mouseup` and `click`
+ * `mouseup` is for clicks and brushes with mouse
+ * `click` is a fallback for keyboard interactions
+ */
+ document.addEventListener('mouseup', onClick, true);
+ document.addEventListener('click', onClick, true);
+ document.addEventListener('mousemove', onMouseUpdate, { passive: true });
+ document.addEventListener('mouseenter', onMouseUpdate, { passive: true });
+ function onClick(event: MouseEvent) {
+ lastClicks.push({
+ el: event.target as Element,
+ mouseX: event.clientX,
+ mouseY: event.clientY,
+ });
+ if (lastClicks.length > MAX_LAST_CLICKS) {
+ lastClicks.shift();
+ }
+ }
+ function onMouseUpdate(event: MouseEvent) {
+ lastMouseX = event.clientX;
+ lastMouseY = event.clientY;
+ }
+
+ return {
+ resolveLastPosition: (): { x: number; y: number } => {
+ const lastClick = [...lastClicks]
+ .reverse()
+ .find(({ el }) => el && document.body.contains(el));
+ if (!lastClick) {
+ // fallback to last mouse position
+ return {
+ x: lastMouseX,
+ y: lastMouseY,
+ };
+ }
+
+ const { top, left, bottom, right } = lastClick.el!.getBoundingClientRect();
+
+ const mouseX = lastClick.mouseX;
+ const mouseY = lastClick.mouseY;
+
+ if (top <= mouseY && bottom >= mouseY && left <= mouseX && right >= mouseX) {
+ // click was inside target element
+ return {
+ x: mouseX,
+ y: mouseY,
+ };
+ } else {
+ // keyboard edge case. no cursor position. use target element position instead
+ return {
+ x: left + (right - left) / 2,
+ y: bottom,
+ };
+ }
+ },
+ };
+}
+
+const { resolveLastPosition } = createInteractionPositionTracker();
function getOrCreateContainerElement() {
let container = document.getElementById(CONTAINER_ID);
- const y = getMouseY() + document.body.scrollTop;
+ let { x, y } = resolveLastPosition();
+ y = y + window.scrollY;
+ x = x + window.scrollX;
+
if (!container) {
container = document.createElement('div');
- container.style.left = getMouseX() + 'px';
+ container.style.left = x + 'px';
container.style.top = y + 'px';
container.style.position = 'absolute';
@@ -44,38 +116,12 @@ function getOrCreateContainerElement() {
container.id = CONTAINER_ID;
document.body.appendChild(container);
} else {
- container.style.left = getMouseX() + 'px';
+ container.style.left = x + 'px';
container.style.top = y + 'px';
}
return container;
}
-let x: number = 0;
-let y: number = 0;
-
-function initialize() {
- if (!initialized) {
- document.addEventListener('mousemove', onMouseUpdate, false);
- document.addEventListener('mouseenter', onMouseUpdate, false);
- initialized = true;
- }
-}
-
-function onMouseUpdate(e: any) {
- x = e.pageX;
- y = e.pageY;
-}
-
-function getMouseX() {
- return x;
-}
-
-function getMouseY() {
- return y;
-}
-
-initialize();
-
/**
* A FlyoutSession describes the session of one opened flyout panel. It offers
* methods to close the flyout panel again. If you open a flyout panel you should make
@@ -87,16 +133,6 @@ initialize();
* @extends EventEmitter
*/
class ContextMenuSession extends EventEmitter {
- /**
- * Binds the current flyout session to an Angular scope, meaning this flyout
- * session will be closed as soon as the Angular scope gets destroyed.
- * @param {object} scope - An angular scope object to bind to.
- */
- public bindToAngularScope(scope: ng.IScope): void {
- const removeWatch = scope.$on('$destroy', () => this.close());
- this.on('closed', () => removeWatch());
- }
-
/**
* Closes the opened flyout as long as it's still the open one.
* If this is not the active session anymore, this method won't do anything.
@@ -151,6 +187,7 @@ export function openContextMenu(
panelPaddingSize="none"
anchorPosition="downRight"
withTitle
+ ownFocus={true}
>
{
+ if (!(await collector.isReady())) {
+ return collector.type;
+ }
+ })
+ )
+ ).filter((collectorType): collectorType is string => !!collectorType);
+ const allReady = collectorTypesNotReady.length === 0;
if (!allReady && this.maximumWaitTimeForAllCollectorsInS >= 0) {
const nowTimestamp = +new Date();
@@ -119,21 +121,24 @@ export class CollectorSet {
callCluster: LegacyAPICaller,
collectors: Map> = this.collectors
) => {
- const responses = [];
- for (const collector of collectors.values()) {
- this.logger.debug(`Fetching data from ${collector.type} collector`);
- try {
- responses.push({
- type: collector.type,
- result: await collector.fetch(callCluster),
- });
- } catch (err) {
- this.logger.warn(err);
- this.logger.warn(`Unable to fetch data from ${collector.type} collector`);
- }
- }
-
- return responses;
+ const responses = await Promise.all(
+ [...collectors.values()].map(async (collector) => {
+ this.logger.debug(`Fetching data from ${collector.type} collector`);
+ try {
+ return {
+ type: collector.type,
+ result: await collector.fetch(callCluster),
+ };
+ } catch (err) {
+ this.logger.warn(err);
+ this.logger.warn(`Unable to fetch data from ${collector.type} collector`);
+ }
+ })
+ );
+
+ return responses.filter(
+ (response): response is { type: string; result: unknown } => typeof response !== 'undefined'
+ );
};
/*
diff --git a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/agg_table.js b/src/plugins/vis_type_table/public/agg_table/agg_table.test.js
similarity index 75%
rename from src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/agg_table.js
rename to src/plugins/vis_type_table/public/agg_table/agg_table.test.js
index 88eb299e3c3a8..0362bd55963d9 100644
--- a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/agg_table.js
+++ b/src/plugins/vis_type_table/public/agg_table/agg_table.test.js
@@ -19,44 +19,71 @@
import $ from 'jquery';
import moment from 'moment';
-import ngMock from 'ng_mock';
-import expect from '@kbn/expect';
+import angular from 'angular';
+import 'angular-mocks';
import sinon from 'sinon';
-import './legacy';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { npStart } from 'ui/new_platform';
import { round } from 'lodash';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { getInnerAngular } from '../../../../../../plugins/vis_type_table/public/get_inner_angular';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { initTableVisLegacyModule } from '../../../../../../plugins/vis_type_table/public/table_vis_legacy_module';
+import { getFieldFormatsRegistry } from '../../../../test_utils/public/stub_field_formats';
+import { coreMock } from '../../../../core/public/mocks';
+import { initAngularBootstrap } from '../../../kibana_legacy/public';
+import { setUiSettings } from '../../../data/public/services';
+import { UI_SETTINGS } from '../../../data/public/';
+import { CSV_SEPARATOR_SETTING, CSV_QUOTE_VALUES_SETTING } from '../../../share/public';
+
+import { setFormatService } from '../services';
+import { getInnerAngular } from '../get_inner_angular';
+import { initTableVisLegacyModule } from '../table_vis_legacy_module';
import { tabifiedData } from './tabified_data';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { configureAppAngularModule } from '../../../../../../plugins/kibana_legacy/public/angular';
+
+const uiSettings = new Map();
describe('Table Vis - AggTable Directive', function () {
+ const core = coreMock.createStart();
+
+ core.uiSettings.set = jest.fn((key, value) => {
+ uiSettings.set(key, value);
+ });
+
+ core.uiSettings.get = jest.fn((key) => {
+ const defaultValues = {
+ dateFormat: 'MMM D, YYYY @ HH:mm:ss.SSS',
+ 'dateFormat:tz': 'UTC',
+ [UI_SETTINGS.SHORT_DOTS_ENABLE]: true,
+ [UI_SETTINGS.FORMAT_CURRENCY_DEFAULT_PATTERN]: '($0,0.[00])',
+ [UI_SETTINGS.FORMAT_NUMBER_DEFAULT_PATTERN]: '0,0.[000]',
+ [UI_SETTINGS.FORMAT_PERCENT_DEFAULT_PATTERN]: '0,0.[000]%',
+ [UI_SETTINGS.FORMAT_NUMBER_DEFAULT_LOCALE]: 'en',
+ [UI_SETTINGS.FORMAT_DEFAULT_TYPE_MAP]: {},
+ [CSV_SEPARATOR_SETTING]: ',',
+ [CSV_QUOTE_VALUES_SETTING]: true,
+ };
+
+ return defaultValues[key] || uiSettings.get(key);
+ });
+
let $rootScope;
let $compile;
let settings;
const initLocalAngular = () => {
- const tableVisModule = getInnerAngular('kibana/table_vis', npStart.core);
- configureAppAngularModule(tableVisModule, npStart.core, true);
+ const tableVisModule = getInnerAngular('kibana/table_vis', core);
initTableVisLegacyModule(tableVisModule);
};
- beforeEach(initLocalAngular);
-
- beforeEach(ngMock.module('kibana/table_vis'));
- beforeEach(
- ngMock.inject(function ($injector, config) {
+ beforeEach(() => {
+ setUiSettings(core.uiSettings);
+ setFormatService(getFieldFormatsRegistry(core));
+ initAngularBootstrap();
+ initLocalAngular();
+ angular.mock.module('kibana/table_vis');
+ angular.mock.inject(($injector, config) => {
settings = config;
$rootScope = $injector.get('$rootScope');
$compile = $injector.get('$compile');
- })
- );
+ });
+ });
let $scope;
beforeEach(function () {
@@ -66,7 +93,7 @@ describe('Table Vis - AggTable Directive', function () {
$scope.$destroy();
});
- it('renders a simple response properly', function () {
+ test('renders a simple response properly', function () {
$scope.dimensions = {
metrics: [{ accessor: 0, format: { id: 'number' }, params: {} }],
buckets: [],
@@ -78,12 +105,12 @@ describe('Table Vis - AggTable Directive', function () {
);
$scope.$digest();
- expect($el.find('tbody').length).to.be(1);
- expect($el.find('td').length).to.be(1);
- expect($el.find('td').text()).to.eql('1,000');
+ expect($el.find('tbody').length).toBe(1);
+ expect($el.find('td').length).toBe(1);
+ expect($el.find('td').text()).toEqual('1,000');
});
- it('renders nothing if the table is empty', function () {
+ test('renders nothing if the table is empty', function () {
$scope.dimensions = {};
$scope.table = null;
const $el = $compile('')(
@@ -91,10 +118,10 @@ describe('Table Vis - AggTable Directive', function () {
);
$scope.$digest();
- expect($el.find('tbody').length).to.be(0);
+ expect($el.find('tbody').length).toBe(0);
});
- it('renders a complex response properly', async function () {
+ test('renders a complex response properly', async function () {
$scope.dimensions = {
buckets: [
{ accessor: 0, params: {} },
@@ -112,37 +139,37 @@ describe('Table Vis - AggTable Directive', function () {
$compile($el)($scope);
$scope.$digest();
- expect($el.find('tbody').length).to.be(1);
+ expect($el.find('tbody').length).toBe(1);
const $rows = $el.find('tbody tr');
- expect($rows.length).to.be.greaterThan(0);
+ expect($rows.length).toBeGreaterThan(0);
function validBytes(str) {
const num = str.replace(/,/g, '');
if (num !== '-') {
- expect(num).to.match(/^\d+$/);
+ expect(num).toMatch(/^\d+$/);
}
}
$rows.each(function () {
// 6 cells in every row
const $cells = $(this).find('td');
- expect($cells.length).to.be(6);
+ expect($cells.length).toBe(6);
const txts = $cells.map(function () {
return $(this).text().trim();
});
// two character country code
- expect(txts[0]).to.match(/^(png|jpg|gif|html|css)$/);
+ expect(txts[0]).toMatch(/^(png|jpg|gif|html|css)$/);
validBytes(txts[1]);
// country
- expect(txts[2]).to.match(/^\w\w$/);
+ expect(txts[2]).toMatch(/^\w\w$/);
validBytes(txts[3]);
// os
- expect(txts[4]).to.match(/^(win|mac|linux)$/);
+ expect(txts[4]).toMatch(/^(win|mac|linux)$/);
validBytes(txts[5]);
});
});
@@ -153,9 +180,9 @@ describe('Table Vis - AggTable Directive', function () {
moment.tz.setDefault(settings.get('dateFormat:tz'));
}
- const off = $scope.$on('change:config.dateFormat:tz', setDefaultTimezone);
const oldTimezoneSetting = settings.get('dateFormat:tz');
settings.set('dateFormat:tz', 'UTC');
+ setDefaultTimezone();
$scope.dimensions = {
buckets: [
@@ -181,24 +208,24 @@ describe('Table Vis - AggTable Directive', function () {
$compile($el)($scope);
$scope.$digest();
- expect($el.find('tfoot').length).to.be(1);
+ expect($el.find('tfoot').length).toBe(1);
const $rows = $el.find('tfoot tr');
- expect($rows.length).to.be(1);
+ expect($rows.length).toBe(1);
const $cells = $($rows[0]).find('th');
- expect($cells.length).to.be(6);
+ expect($cells.length).toBe(6);
for (let i = 0; i < 6; i++) {
- expect($($cells[i]).text().trim()).to.be(expected[i]);
+ expect($($cells[i]).text().trim()).toBe(expected[i]);
}
settings.set('dateFormat:tz', oldTimezoneSetting);
- off();
+ setDefaultTimezone();
}
- it('as count', async function () {
+ test('as count', async function () {
await totalsRowTest('count', ['18', '18', '18', '18', '18', '18']);
});
- it('as min', async function () {
+ test('as min', async function () {
await totalsRowTest('min', [
'',
'2014-09-28',
@@ -208,7 +235,7 @@ describe('Table Vis - AggTable Directive', function () {
'11',
]);
});
- it('as max', async function () {
+ test('as max', async function () {
await totalsRowTest('max', [
'',
'2014-10-03',
@@ -218,16 +245,16 @@ describe('Table Vis - AggTable Directive', function () {
'837',
]);
});
- it('as avg', async function () {
+ test('as avg', async function () {
await totalsRowTest('avg', ['', '', '87,221.5', '', '64.667', '206.833']);
});
- it('as sum', async function () {
+ test('as sum', async function () {
await totalsRowTest('sum', ['', '', '1,569,987', '', '1,164', '3,723']);
});
});
describe('aggTable.toCsv()', function () {
- it('escapes rows and columns properly', function () {
+ test('escapes rows and columns properly', function () {
const $el = $compile('')(
$scope
);
@@ -244,12 +271,12 @@ describe('Table Vis - AggTable Directive', function () {
rows: [{ a: 1, b: 2, c: '"foobar"' }],
};
- expect(aggTable.toCsv()).to.be(
+ expect(aggTable.toCsv()).toBe(
'one,two,"with double-quotes("")"' + '\r\n' + '1,2,"""foobar"""' + '\r\n'
);
});
- it('exports rows and columns properly', async function () {
+ test('exports rows and columns properly', async function () {
$scope.dimensions = {
buckets: [
{ accessor: 0, params: {} },
@@ -274,7 +301,7 @@ describe('Table Vis - AggTable Directive', function () {
$tableScope.table = $scope.table;
const raw = aggTable.toCsv(false);
- expect(raw).to.be(
+ expect(raw).toBe(
'"extension: Descending","Average bytes","geo.src: Descending","Average bytes","machine.os: Descending","Average bytes"' +
'\r\n' +
'png,412032,IT,9299,win,0' +
@@ -304,7 +331,7 @@ describe('Table Vis - AggTable Directive', function () {
);
});
- it('exports formatted rows and columns properly', async function () {
+ test('exports formatted rows and columns properly', async function () {
$scope.dimensions = {
buckets: [
{ accessor: 0, params: {} },
@@ -332,7 +359,7 @@ describe('Table Vis - AggTable Directive', function () {
$tableScope.formattedColumns[0].formatter.convert = (v) => `${v}_formatted`;
const formatted = aggTable.toCsv(true);
- expect(formatted).to.be(
+ expect(formatted).toBe(
'"extension: Descending","Average bytes","geo.src: Descending","Average bytes","machine.os: Descending","Average bytes"' +
'\r\n' +
'"png_formatted",412032,IT,9299,win,0' +
@@ -363,7 +390,7 @@ describe('Table Vis - AggTable Directive', function () {
});
});
- it('renders percentage columns', async function () {
+ test('renders percentage columns', async function () {
$scope.dimensions = {
buckets: [
{ accessor: 0, params: {} },
@@ -390,8 +417,8 @@ describe('Table Vis - AggTable Directive', function () {
$scope.$digest();
const $headings = $el.find('th');
- expect($headings.length).to.be(7);
- expect($headings.eq(3).text().trim()).to.be('Average bytes percentages');
+ expect($headings.length).toBe(7);
+ expect($headings.eq(3).text().trim()).toBe('Average bytes percentages');
const countColId = $scope.table.columns.find((col) => col.name === $scope.percentageCol).id;
const counts = $scope.table.rows.map((row) => row[countColId]);
@@ -400,7 +427,7 @@ describe('Table Vis - AggTable Directive', function () {
$percentageColValues.each((i, value) => {
const percentage = `${round((counts[i] / total) * 100, 3)}%`;
- expect(value).to.be(percentage);
+ expect(value).toBe(percentage);
});
});
@@ -420,7 +447,7 @@ describe('Table Vis - AggTable Directive', function () {
window.Blob = origBlob;
});
- it('calls _saveAs properly', function () {
+ test('calls _saveAs properly', function () {
const $el = $compile('')($scope);
$scope.$digest();
@@ -440,19 +467,19 @@ describe('Table Vis - AggTable Directive', function () {
aggTable.csv.filename = 'somefilename.csv';
aggTable.exportAsCsv();
- expect(saveAs.callCount).to.be(1);
+ expect(saveAs.callCount).toBe(1);
const call = saveAs.getCall(0);
- expect(call.args[0]).to.be.a(FakeBlob);
- expect(call.args[0].slices).to.eql([
+ expect(call.args[0]).toBeInstanceOf(FakeBlob);
+ expect(call.args[0].slices).toEqual([
'one,two,"with double-quotes("")"' + '\r\n' + '1,2,"""foobar"""' + '\r\n',
]);
- expect(call.args[0].opts).to.eql({
+ expect(call.args[0].opts).toEqual({
type: 'text/plain;charset=utf-8',
});
- expect(call.args[1]).to.be('somefilename.csv');
+ expect(call.args[1]).toBe('somefilename.csv');
});
- it('should use the export-title attribute', function () {
+ test('should use the export-title attribute', function () {
const expected = 'export file name';
const $el = $compile(
``
@@ -468,7 +495,7 @@ describe('Table Vis - AggTable Directive', function () {
$tableScope.exportTitle = expected;
$scope.$digest();
- expect(aggTable.csv.filename).to.equal(`${expected}.csv`);
+ expect(aggTable.csv.filename).toEqual(`${expected}.csv`);
});
});
});
diff --git a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/agg_table_group.js b/src/plugins/vis_type_table/public/agg_table/agg_table_group.test.js
similarity index 74%
rename from src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/agg_table_group.js
rename to src/plugins/vis_type_table/public/agg_table/agg_table_group.test.js
index 99b397167009d..43913eed32f90 100644
--- a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/agg_table_group.js
+++ b/src/plugins/vis_type_table/public/agg_table/agg_table_group.test.js
@@ -18,38 +18,50 @@
*/
import $ from 'jquery';
-import ngMock from 'ng_mock';
+import angular from 'angular';
+import 'angular-mocks';
import expect from '@kbn/expect';
-import './legacy';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { getInnerAngular } from '../../../../../../plugins/vis_type_table/public/get_inner_angular';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { initTableVisLegacyModule } from '../../../../../../plugins/vis_type_table/public/table_vis_legacy_module';
+
+import { getFieldFormatsRegistry } from '../../../../test_utils/public/stub_field_formats';
+import { coreMock } from '../../../../core/public/mocks';
+import { initAngularBootstrap } from '../../../kibana_legacy/public';
+import { setUiSettings } from '../../../data/public/services';
+import { setFormatService } from '../services';
+import { getInnerAngular } from '../get_inner_angular';
+import { initTableVisLegacyModule } from '../table_vis_legacy_module';
import { tabifiedData } from './tabified_data';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { npStart } from 'ui/new_platform';
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { configureAppAngularModule } from '../../../../../../plugins/kibana_legacy/public/angular';
+
+const uiSettings = new Map();
describe('Table Vis - AggTableGroup Directive', function () {
+ const core = coreMock.createStart();
let $rootScope;
let $compile;
+ core.uiSettings.set = jest.fn((key, value) => {
+ uiSettings.set(key, value);
+ });
+
+ core.uiSettings.get = jest.fn((key) => {
+ return uiSettings.get(key);
+ });
+
const initLocalAngular = () => {
- const tableVisModule = getInnerAngular('kibana/table_vis', npStart.core);
- configureAppAngularModule(tableVisModule, npStart.core, true);
+ const tableVisModule = getInnerAngular('kibana/table_vis', core);
initTableVisLegacyModule(tableVisModule);
};
- beforeEach(initLocalAngular);
-
- beforeEach(ngMock.module('kibana/table_vis'));
- beforeEach(
- ngMock.inject(function ($injector) {
+ beforeEach(() => {
+ setUiSettings(core.uiSettings);
+ setFormatService(getFieldFormatsRegistry(core));
+ initAngularBootstrap();
+ initLocalAngular();
+ angular.mock.module('kibana/table_vis');
+ angular.mock.inject(($injector) => {
$rootScope = $injector.get('$rootScope');
$compile = $injector.get('$compile');
- })
- );
+ });
+ });
let $scope;
beforeEach(function () {
diff --git a/src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/tabified_data.js b/src/plugins/vis_type_table/public/agg_table/tabified_data.js
similarity index 100%
rename from src/legacy/core_plugins/kibana/public/__tests__/vis_type_table/tabified_data.js
rename to src/plugins/vis_type_table/public/agg_table/tabified_data.js
diff --git a/src/plugins/vis_type_table/public/paginated_table/rows.js b/src/plugins/vis_type_table/public/paginated_table/rows.js
index d2192a5843644..d8f01a10c63fa 100644
--- a/src/plugins/vis_type_table/public/paginated_table/rows.js
+++ b/src/plugins/vis_type_table/public/paginated_table/rows.js
@@ -19,6 +19,7 @@
import $ from 'jquery';
import _ from 'lodash';
+import angular from 'angular';
import tableCellFilterHtml from './table_cell_filter.html';
export function KbnRows($compile) {
@@ -65,7 +66,9 @@ export function KbnRows($compile) {
if (column.filterable && contentsIsDefined) {
$cell = createFilterableCell(contents);
- $cellContent = $cell.find('[data-cell-content]');
+ // in jest tests 'angular' is using jqLite. In jqLite the method find lookups only by tags.
+ // Because of this, we should change a way how we get cell content so that tests will pass.
+ $cellContent = angular.element($cell[0].querySelector('[data-cell-content]'));
} else {
$cell = $cellContent = createCell();
}
diff --git a/src/plugins/vis_type_timeseries/common/metric_types.js b/src/plugins/vis_type_timeseries/common/metric_types.js
index 9dc6085b080e9..05836a6df410a 100644
--- a/src/plugins/vis_type_timeseries/common/metric_types.js
+++ b/src/plugins/vis_type_timeseries/common/metric_types.js
@@ -27,6 +27,9 @@ export const METRIC_TYPES = {
VARIANCE: 'variance',
SUM_OF_SQUARES: 'sum_of_squares',
CARDINALITY: 'cardinality',
+ VALUE_COUNT: 'value_count',
+ AVERAGE: 'avg',
+ SUM: 'sum',
};
export const EXTENDED_STATS_TYPES = [
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.test.tsx b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.test.tsx
new file mode 100644
index 0000000000000..968fa5384e1d8
--- /dev/null
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.test.tsx
@@ -0,0 +1,184 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import React from 'react';
+import { mountWithIntl } from 'test_utils/enzyme_helpers';
+import { AggSelect } from './agg_select';
+import { METRIC, SERIES } from '../../../test_utils';
+import { EuiComboBox } from '@elastic/eui';
+
+describe('TSVB AggSelect', () => {
+ const setup = (panelType: string, value: string) => {
+ const metric = {
+ ...METRIC,
+ type: 'filter_ratio',
+ field: 'histogram_value',
+ };
+ const series = { ...SERIES, metrics: [metric] };
+
+ const wrapper = mountWithIntl(
+
+ );
+ return wrapper;
+ };
+
+ it('should only display filter ratio compatible aggs', () => {
+ const wrapper = setup('filter_ratio', 'avg');
+ expect(wrapper.find(EuiComboBox).props().options).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "label": "Average",
+ "value": "avg",
+ },
+ Object {
+ "label": "Cardinality",
+ "value": "cardinality",
+ },
+ Object {
+ "label": "Count",
+ "value": "count",
+ },
+ Object {
+ "label": "Positive Rate",
+ "value": "positive_rate",
+ },
+ Object {
+ "label": "Max",
+ "value": "max",
+ },
+ Object {
+ "label": "Min",
+ "value": "min",
+ },
+ Object {
+ "label": "Sum",
+ "value": "sum",
+ },
+ Object {
+ "label": "Value Count",
+ "value": "value_count",
+ },
+ ]
+ `);
+ });
+
+ it('should only display histogram compatible aggs', () => {
+ const wrapper = setup('histogram', 'avg');
+ expect(wrapper.find(EuiComboBox).props().options).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "label": "Average",
+ "value": "avg",
+ },
+ Object {
+ "label": "Count",
+ "value": "count",
+ },
+ Object {
+ "label": "Sum",
+ "value": "sum",
+ },
+ Object {
+ "label": "Value Count",
+ "value": "value_count",
+ },
+ ]
+ `);
+ });
+
+ it('should only display metrics compatible aggs', () => {
+ const wrapper = setup('metrics', 'avg');
+ expect(wrapper.find(EuiComboBox).props().options).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "label": "Average",
+ "value": "avg",
+ },
+ Object {
+ "label": "Cardinality",
+ "value": "cardinality",
+ },
+ Object {
+ "label": "Count",
+ "value": "count",
+ },
+ Object {
+ "label": "Filter Ratio",
+ "value": "filter_ratio",
+ },
+ Object {
+ "label": "Positive Rate",
+ "value": "positive_rate",
+ },
+ Object {
+ "label": "Max",
+ "value": "max",
+ },
+ Object {
+ "label": "Min",
+ "value": "min",
+ },
+ Object {
+ "label": "Percentile",
+ "value": "percentile",
+ },
+ Object {
+ "label": "Percentile Rank",
+ "value": "percentile_rank",
+ },
+ Object {
+ "label": "Static Value",
+ "value": "static",
+ },
+ Object {
+ "label": "Std. Deviation",
+ "value": "std_deviation",
+ },
+ Object {
+ "label": "Sum",
+ "value": "sum",
+ },
+ Object {
+ "label": "Sum of Squares",
+ "value": "sum_of_squares",
+ },
+ Object {
+ "label": "Top Hit",
+ "value": "top_hit",
+ },
+ Object {
+ "label": "Value Count",
+ "value": "value_count",
+ },
+ Object {
+ "label": "Variance",
+ "value": "variance",
+ },
+ ]
+ `);
+ });
+});
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx
index 6fa1a2adaa08e..7701d351e5478 100644
--- a/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/agg_select.tsx
@@ -225,6 +225,19 @@ const specialAggs: AggSelectOption[] = [
},
];
+const FILTER_RATIO_AGGS = [
+ 'avg',
+ 'cardinality',
+ 'count',
+ 'positive_rate',
+ 'max',
+ 'min',
+ 'sum',
+ 'value_count',
+];
+
+const HISTOGRAM_AGGS = ['avg', 'count', 'sum', 'value_count'];
+
const allAggOptions = [...metricAggs, ...pipelineAggs, ...siblingAggs, ...specialAggs];
function filterByPanelType(panelType: string) {
@@ -257,6 +270,10 @@ export function AggSelect(props: AggSelectUiProps) {
let options: EuiComboBoxOptionOption[];
if (panelType === 'metrics') {
options = metricAggs;
+ } else if (panelType === 'filter_ratio') {
+ options = metricAggs.filter((m) => FILTER_RATIO_AGGS.includes(`${m.value}`));
+ } else if (panelType === 'histogram') {
+ options = metricAggs.filter((m) => HISTOGRAM_AGGS.includes(`${m.value}`));
} else {
const disableSiblingAggs = (agg: AggSelectOption) => ({
...agg,
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/filter_ratio.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/filter_ratio.js
index b5311e3832da4..2aa994c09a2ad 100644
--- a/src/plugins/vis_type_timeseries/public/application/components/aggs/filter_ratio.js
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/filter_ratio.js
@@ -36,7 +36,15 @@ import {
} from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n/react';
import { KBN_FIELD_TYPES } from '../../../../../../plugins/data/public';
-import { METRIC_TYPES } from '../../../../../../plugins/vis_type_timeseries/common/metric_types';
+import { getSupportedFieldsByMetricType } from '../lib/get_supported_fields_by_metric_type';
+
+const isFieldHistogram = (fields, indexPattern, field) => {
+ const indexFields = fields[indexPattern];
+ if (!indexFields) return false;
+ const fieldObject = indexFields.find((f) => f.name === field);
+ if (!fieldObject) return false;
+ return fieldObject.type === KBN_FIELD_TYPES.HISTOGRAM;
+};
export const FilterRatioAgg = (props) => {
const { series, fields, panel } = props;
@@ -56,9 +64,6 @@ export const FilterRatioAgg = (props) => {
const model = { ...defaults, ...props.model };
const htmlId = htmlIdGenerator();
- const restrictFields =
- model.metric_agg === METRIC_TYPES.CARDINALITY ? [] : [KBN_FIELD_TYPES.NUMBER];
-
return (
{
@@ -149,7 +156,7 @@ export const FilterRatioAgg = (props) => {
{
+ const setup = (metric) => {
+ const series = { ...SERIES, metrics: [metric] };
+ const panel = { ...PANEL, series };
+
+ const wrapper = mountWithIntl(
+
+
+
+ );
+ return wrapper;
+ };
+
+ describe('histogram support', () => {
+ it('should only display histogram compatible aggs', () => {
+ const metric = {
+ ...METRIC,
+ metric_agg: 'avg',
+ field: 'histogram_value',
+ };
+ const wrapper = setup(metric);
+ expect(wrapper.find(EuiComboBox).at(1).props().options).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "label": "Average",
+ "value": "avg",
+ },
+ Object {
+ "label": "Count",
+ "value": "count",
+ },
+ Object {
+ "label": "Sum",
+ "value": "sum",
+ },
+ Object {
+ "label": "Value Count",
+ "value": "value_count",
+ },
+ ]
+ `);
+ });
+ const shouldNotHaveHistogramField = (agg) => {
+ it(`should not have histogram fields for ${agg}`, () => {
+ const metric = {
+ ...METRIC,
+ metric_agg: agg,
+ field: '',
+ };
+ const wrapper = setup(metric);
+ expect(wrapper.find(EuiComboBox).at(2).props().options).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "label": "number",
+ "options": Array [
+ Object {
+ "label": "system.cpu.user.pct",
+ "value": "system.cpu.user.pct",
+ },
+ ],
+ },
+ ]
+ `);
+ });
+ };
+ shouldNotHaveHistogramField('max');
+ shouldNotHaveHistogramField('min');
+ shouldNotHaveHistogramField('positive_rate');
+
+ it(`should not have histogram fields for cardinality`, () => {
+ const metric = {
+ ...METRIC,
+ metric_agg: 'cardinality',
+ field: '',
+ };
+ const wrapper = setup(metric);
+ expect(wrapper.find(EuiComboBox).at(2).props().options).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "label": "date",
+ "options": Array [
+ Object {
+ "label": "@timestamp",
+ "value": "@timestamp",
+ },
+ ],
+ },
+ Object {
+ "label": "number",
+ "options": Array [
+ Object {
+ "label": "system.cpu.user.pct",
+ "value": "system.cpu.user.pct",
+ },
+ ],
+ },
+ ]
+ `);
+ });
+ });
+});
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/histogram_support.test.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/histogram_support.test.js
new file mode 100644
index 0000000000000..7af33ba11f247
--- /dev/null
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/histogram_support.test.js
@@ -0,0 +1,94 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import React from 'react';
+import { mountWithIntl } from 'test_utils/enzyme_helpers';
+import { Agg } from './agg';
+import { FieldSelect } from './field_select';
+import { FIELDS, METRIC, SERIES, PANEL } from '../../../test_utils';
+const runTest = (aggType, name, test, additionalProps = {}) => {
+ describe(aggType, () => {
+ const metric = {
+ ...METRIC,
+ type: aggType,
+ field: 'histogram_value',
+ ...additionalProps,
+ };
+ const series = { ...SERIES, metrics: [metric] };
+ const panel = { ...PANEL, series };
+
+ it(name, () => {
+ const wrapper = mountWithIntl(
+
+ );
+ test(wrapper);
+ });
+ });
+};
+
+describe('Histogram Types', () => {
+ describe('supported', () => {
+ const shouldHaveHistogramSupport = (aggType, additionalProps = {}) => {
+ runTest(
+ aggType,
+ 'supports',
+ (wrapper) =>
+ expect(wrapper.find(FieldSelect).at(0).props().restrict).toContain('histogram'),
+ additionalProps
+ );
+ };
+ shouldHaveHistogramSupport('avg');
+ shouldHaveHistogramSupport('sum');
+ shouldHaveHistogramSupport('value_count');
+ shouldHaveHistogramSupport('percentile');
+ shouldHaveHistogramSupport('percentile_rank');
+ shouldHaveHistogramSupport('filter_ratio', { metric_agg: 'avg' });
+ });
+ describe('not supported', () => {
+ const shouldNotHaveHistogramSupport = (aggType, additionalProps = {}) => {
+ runTest(
+ aggType,
+ 'does not support',
+ (wrapper) =>
+ expect(wrapper.find(FieldSelect).at(0).props().restrict).not.toContain('histogram'),
+ additionalProps
+ );
+ };
+ shouldNotHaveHistogramSupport('cardinality');
+ shouldNotHaveHistogramSupport('max');
+ shouldNotHaveHistogramSupport('min');
+ shouldNotHaveHistogramSupport('variance');
+ shouldNotHaveHistogramSupport('sum_of_squares');
+ shouldNotHaveHistogramSupport('std_deviation');
+ shouldNotHaveHistogramSupport('positive_rate');
+ shouldNotHaveHistogramSupport('top_hit');
+ });
+});
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile.js
index 6a7bf1bffe83c..f12c0c8f6f465 100644
--- a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile.js
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile.js
@@ -36,7 +36,7 @@ import { FormattedMessage } from '@kbn/i18n/react';
import { KBN_FIELD_TYPES } from '../../../../../../plugins/data/public';
import { Percentiles, newPercentile } from './percentile_ui';
-const RESTRICT_FIELDS = [KBN_FIELD_TYPES.NUMBER];
+const RESTRICT_FIELDS = [KBN_FIELD_TYPES.NUMBER, KBN_FIELD_TYPES.HISTOGRAM];
const checkModel = (model) => Array.isArray(model.percentiles);
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx
index a16f5aeefc49c..d02a16ade2bba 100644
--- a/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/percentile_rank/percentile_rank.tsx
@@ -41,7 +41,7 @@ import { IFieldType, KBN_FIELD_TYPES } from '../../../../../../../plugins/data/p
import { MetricsItemsSchema, PanelSchema, SeriesItemsSchema } from '../../../../../common/types';
import { DragHandleProps } from '../../../../types';
-const RESTRICT_FIELDS = [KBN_FIELD_TYPES.NUMBER];
+const RESTRICT_FIELDS = [KBN_FIELD_TYPES.NUMBER, KBN_FIELD_TYPES.HISTOGRAM];
interface PercentileRankAggProps {
disableDelete: boolean;
diff --git a/src/plugins/vis_type_timeseries/public/application/components/aggs/positive_rate.js b/src/plugins/vis_type_timeseries/public/application/components/aggs/positive_rate.js
index 3ca89f7289d65..c20bcc1babc1d 100644
--- a/src/plugins/vis_type_timeseries/public/application/components/aggs/positive_rate.js
+++ b/src/plugins/vis_type_timeseries/public/application/components/aggs/positive_rate.js
@@ -123,7 +123,7 @@ export const PositiveRateAgg = (props) => {
t !== KBN_FIELD_TYPES.HISTOGRAM);
+ case METRIC_TYPES.VALUE_COUNT:
+ case METRIC_TYPES.AVERAGE:
+ case METRIC_TYPES.SUM:
+ return [KBN_FIELD_TYPES.NUMBER, KBN_FIELD_TYPES.HISTOGRAM];
+ default:
+ return [KBN_FIELD_TYPES.NUMBER];
+ }
+}
diff --git a/src/plugins/vis_type_timeseries/public/application/components/lib/get_supported_fields_by_metric_type.test.js b/src/plugins/vis_type_timeseries/public/application/components/lib/get_supported_fields_by_metric_type.test.js
new file mode 100644
index 0000000000000..3cd3fac191bf1
--- /dev/null
+++ b/src/plugins/vis_type_timeseries/public/application/components/lib/get_supported_fields_by_metric_type.test.js
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { getSupportedFieldsByMetricType } from './get_supported_fields_by_metric_type';
+
+describe('getSupportedFieldsByMetricType', () => {
+ const shouldHaveHistogramAndNumbers = (type) =>
+ it(`should return numbers and histogram for ${type}`, () => {
+ expect(getSupportedFieldsByMetricType(type)).toEqual(['number', 'histogram']);
+ });
+ const shouldHaveOnlyNumbers = (type) =>
+ it(`should return only numbers for ${type}`, () => {
+ expect(getSupportedFieldsByMetricType(type)).toEqual(['number']);
+ });
+
+ shouldHaveHistogramAndNumbers('value_count');
+ shouldHaveHistogramAndNumbers('avg');
+ shouldHaveHistogramAndNumbers('sum');
+
+ shouldHaveOnlyNumbers('positive_rate');
+ shouldHaveOnlyNumbers('std_deviation');
+ shouldHaveOnlyNumbers('max');
+ shouldHaveOnlyNumbers('min');
+
+ it(`should return everything but histogram for cardinality`, () => {
+ expect(getSupportedFieldsByMetricType('cardinality')).not.toContain('histogram');
+ });
+});
diff --git a/src/plugins/vis_type_timeseries/public/test_utils/index.ts b/src/plugins/vis_type_timeseries/public/test_utils/index.ts
new file mode 100644
index 0000000000000..96ecc89b70c2d
--- /dev/null
+++ b/src/plugins/vis_type_timeseries/public/test_utils/index.ts
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export const UI_RESTRICTIONS = { '*': true };
+export const INDEX_PATTERN = 'some-pattern';
+export const FIELDS = {
+ [INDEX_PATTERN]: [
+ {
+ type: 'date',
+ name: '@timestamp',
+ },
+ {
+ type: 'number',
+ name: 'system.cpu.user.pct',
+ },
+ {
+ type: 'histogram',
+ name: 'histogram_value',
+ },
+ ],
+};
+export const METRIC = {
+ id: 'sample_metric',
+ type: 'avg',
+ field: 'system.cpu.user.pct',
+};
+export const SERIES = {
+ metrics: [METRIC],
+};
+export const PANEL = {
+ type: 'timeseries',
+ index_pattern: INDEX_PATTERN,
+ series: SERIES,
+};
diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.js
index 0f2a7e153bde0..909cee456c31f 100644
--- a/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.js
+++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.js
@@ -20,11 +20,20 @@
import { buildProcessorFunction } from '../build_processor_function';
import { processors } from '../response_processors/table';
import { getLastValue } from '../../../../common/get_last_value';
-import regression from 'regression';
import { first, get } from 'lodash';
import { overwrite } from '../helpers';
import { getActiveSeries } from '../helpers/get_active_series';
+function trendSinceLastBucket(data) {
+ if (data.length < 2) {
+ return 0;
+ }
+ const currentBucket = data[data.length - 1];
+ const prevBucket = data[data.length - 2];
+ const trend = (currentBucket[1] - prevBucket[1]) / currentBucket[1];
+ return Number.isNaN(trend) ? 0 : trend;
+}
+
export function processBucket(panel) {
return (bucket) => {
const series = getActiveSeries(panel).map((series) => {
@@ -38,14 +47,12 @@ export function processBucket(panel) {
};
overwrite(bucket, series.id, { meta, timeseries });
}
-
const processor = buildProcessorFunction(processors, bucket, panel, series);
const result = first(processor([]));
if (!result) return null;
const data = get(result, 'data', []);
- const linearRegression = regression.linear(data);
+ result.slope = trendSinceLastBucket(data);
result.last = getLastValue(data);
- result.slope = linearRegression.equation[0];
return result;
});
return { key: bucket.key, series };
diff --git a/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.test.js b/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.test.js
new file mode 100644
index 0000000000000..a4f9c71a5953d
--- /dev/null
+++ b/src/plugins/vis_type_timeseries/server/lib/vis_data/table/process_bucket.test.js
@@ -0,0 +1,159 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { processBucket } from './process_bucket';
+
+function createValueObject(key, value, seriesId) {
+ return { key_as_string: `${key}`, doc_count: value, key, [seriesId]: { value } };
+}
+
+function createBucketsObjects(size, sort, seriesId) {
+ const values = Array(size)
+ .fill(1)
+ .map((_, i) => i + 1);
+ if (sort === 'flat') {
+ return values.map((_, i) => createValueObject(i, 1, seriesId));
+ }
+ if (sort === 'desc') {
+ return values.reverse().map((v, i) => createValueObject(i, v, seriesId));
+ }
+ return values.map((v, i) => createValueObject(i, v, seriesId));
+}
+
+function createPanel(series) {
+ return {
+ type: 'table',
+ time_field: '',
+ series: series.map((seriesId) => ({
+ id: seriesId,
+ metrics: [{ id: seriesId, type: 'count' }],
+ trend_arrows: 1,
+ })),
+ };
+}
+
+function createBuckets(series) {
+ return [
+ { key: 'A', trend: 'asc', size: 10 },
+ { key: 'B', trend: 'desc', size: 10 },
+ { key: 'C', trend: 'flat', size: 10 },
+ { key: 'D', trend: 'asc', size: 1, expectedTrend: 'flat' },
+ ].map(({ key, trend, size, expectedTrend }) => {
+ const baseObj = {
+ key,
+ expectedTrend: expectedTrend || trend,
+ };
+ for (const seriesId of series) {
+ baseObj[seriesId] = {
+ meta: {
+ timeField: 'timestamp',
+ seriesId: seriesId,
+ },
+ buckets: createBucketsObjects(size, trend, seriesId),
+ };
+ }
+ return baseObj;
+ });
+}
+
+function trendChecker(trend, slope) {
+ switch (trend) {
+ case 'asc':
+ return slope > 0;
+ case 'desc':
+ return slope <= 0;
+ case 'flat':
+ return slope === 0;
+ default:
+ throw Error(`Unknown trend "${trend}" (slope: ${slope})`);
+ }
+}
+
+describe('processBucket(panel)', () => {
+ describe('single metric panel', () => {
+ let panel;
+ const SERIES_ID = 'series-id';
+
+ beforeEach(() => {
+ panel = createPanel([SERIES_ID]);
+ });
+
+ test('return the correct trend direction', () => {
+ const bucketProcessor = processBucket(panel);
+ const buckets = createBuckets([SERIES_ID]);
+ for (const bucket of buckets) {
+ const result = bucketProcessor(bucket);
+ expect(result.key).toEqual(bucket.key);
+ expect(trendChecker(bucket.expectedTrend, result.series[0].slope)).toBeTruthy();
+ }
+ });
+
+ test('properly handle 0 values for trend', () => {
+ const bucketProcessor = processBucket(panel);
+ const bucketForNaNResult = {
+ key: 'NaNScenario',
+ expectedTrend: 'flat',
+ [SERIES_ID]: {
+ meta: {
+ timeField: 'timestamp',
+ seriesId: SERIES_ID,
+ },
+ buckets: [
+ // this is a flat case, but 0/0 does not produce a valid number
+ createValueObject(0, 0, SERIES_ID),
+ createValueObject(1, 0, SERIES_ID),
+ ],
+ },
+ };
+ const result = bucketProcessor(bucketForNaNResult);
+ expect(result.key).toEqual(bucketForNaNResult.key);
+ expect(trendChecker(bucketForNaNResult.expectedTrend, result.series[0].slope)).toEqual(true);
+ });
+
+ test('have the side effect to create the timeseries property if missing on bucket', () => {
+ const bucketProcessor = processBucket(panel);
+ const buckets = createBuckets([SERIES_ID]);
+ for (const bucket of buckets) {
+ bucketProcessor(bucket);
+ expect(bucket[SERIES_ID].buckets).toBeUndefined();
+ expect(bucket[SERIES_ID].timeseries).toBeDefined();
+ }
+ });
+ });
+
+ describe('multiple metrics panel', () => {
+ let panel;
+ const SERIES = ['series-id-1', 'series-id-2'];
+
+ beforeEach(() => {
+ panel = createPanel(SERIES);
+ });
+
+ test('return the correct trend direction', () => {
+ const bucketProcessor = processBucket(panel);
+ const buckets = createBuckets(SERIES);
+ for (const bucket of buckets) {
+ const result = bucketProcessor(bucket);
+ expect(result.key).toEqual(bucket.key);
+ expect(trendChecker(bucket.expectedTrend, result.series[0].slope)).toBeTruthy();
+ expect(trendChecker(bucket.expectedTrend, result.series[1].slope)).toBeTruthy();
+ }
+ });
+ });
+});
diff --git a/test/functional/apps/discover/_discover.js b/test/functional/apps/discover/_discover.js
index 47741c1ab8a0d..94a271987ecdf 100644
--- a/test/functional/apps/discover/_discover.js
+++ b/test/functional/apps/discover/_discover.js
@@ -254,6 +254,19 @@ export default function ({ getService, getPageObjects }) {
});
});
+ describe('invalid time range in URL', function () {
+ it('should get the default timerange', async function () {
+ const prevTime = await PageObjects.timePicker.getTimeConfig();
+ await PageObjects.common.navigateToUrl('discover', '#/?_g=(time:(from:now-15m,to:null))', {
+ useActualUrl: true,
+ });
+ await PageObjects.header.awaitKibanaChrome();
+ const time = await PageObjects.timePicker.getTimeConfig();
+ expect(time.start).to.be(prevTime.start);
+ expect(time.end).to.be(prevTime.end);
+ });
+ });
+
describe('empty query', function () {
it('should update the histogram timerange when the query is resubmitted', async function () {
await kibanaServer.uiSettings.update({
@@ -268,17 +281,6 @@ export default function ({ getService, getPageObjects }) {
});
});
- describe('invalid time range in URL', function () {
- it('should display a "Invalid time range toast"', async function () {
- await PageObjects.common.navigateToUrl('discover', '#/?_g=(time:(from:now-15m,to:null))', {
- useActualUrl: true,
- });
- await PageObjects.header.awaitKibanaChrome();
- const toastMessage = await PageObjects.common.closeToast();
- expect(toastMessage).to.be('Invalid time range');
- });
- });
-
describe('managing fields', function () {
it('should add a field, sort by it, remove it and also sorting by it', async function () {
await PageObjects.timePicker.setDefaultAbsoluteRangeViaUiSettings();
diff --git a/test/functional/apps/saved_objects_management/edit_saved_object.ts b/test/functional/apps/saved_objects_management/edit_saved_object.ts
index 2c9200c2f8d93..0e2ff44ff62ef 100644
--- a/test/functional/apps/saved_objects_management/edit_saved_object.ts
+++ b/test/functional/apps/saved_objects_management/edit_saved_object.ts
@@ -66,6 +66,7 @@ export default function ({ getPageObjects, getService }: FtrProviderContext) {
await button.click();
};
+ // Flaky: https://github.com/elastic/kibana/issues/68400
describe('saved objects edition page', () => {
beforeEach(async () => {
await esArchiver.load('saved_objects_management/edit_saved_object');
diff --git a/test/functional/services/listing_table.ts b/test/functional/services/listing_table.ts
index 9a117458c7f76..fa42eb60fa410 100644
--- a/test/functional/services/listing_table.ts
+++ b/test/functional/services/listing_table.ts
@@ -179,9 +179,12 @@ export function ListingTableProvider({ getService, getPageObjects }: FtrProvider
* @param promptBtnTestSubj testSubj locator for Prompt button
*/
public async clickNewButton(promptBtnTestSubj: string): Promise {
- await retry.try(async () => {
+ await retry.tryForTime(20000, async () => {
// newItemButton button is only visible when there are items in the listing table is displayed.
- if (await testSubjects.exists('newItemButton')) {
+ const isNewItemButtonPresent = await testSubjects.exists('newItemButton', {
+ timeout: 5000,
+ });
+ if (isNewItemButtonPresent) {
await testSubjects.click('newItemButton');
} else {
// no items exist, click createPromptButton to create new dashboard/visualization
diff --git a/x-pack/.gitignore b/x-pack/.gitignore
index e181caf2b1a49..0c916ef0e9b91 100644
--- a/x-pack/.gitignore
+++ b/x-pack/.gitignore
@@ -6,6 +6,7 @@
/test/page_load_metrics/screenshots
/test/functional/apps/reporting/reports/session
/test/reporting/configs/failure_debug/
+/plugins/reporting/.chromium/
/legacy/plugins/reporting/.chromium/
/legacy/plugins/reporting/.phantom/
/plugins/reporting/chromium/
diff --git a/x-pack/plugins/apm/common/environment_filter_values.ts b/x-pack/plugins/apm/common/environment_filter_values.ts
index 239378d0ea94a..38b6f480ca3d3 100644
--- a/x-pack/plugins/apm/common/environment_filter_values.ts
+++ b/x-pack/plugins/apm/common/environment_filter_values.ts
@@ -4,5 +4,16 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { i18n } from '@kbn/i18n';
+
export const ENVIRONMENT_ALL = 'ENVIRONMENT_ALL';
export const ENVIRONMENT_NOT_DEFINED = 'ENVIRONMENT_NOT_DEFINED';
+
+export function getEnvironmentLabel(environment: string) {
+ if (environment === ENVIRONMENT_NOT_DEFINED) {
+ return i18n.translate('xpack.apm.filter.environment.notDefinedLabel', {
+ defaultMessage: 'Not defined',
+ });
+ }
+ return environment;
+}
diff --git a/x-pack/plugins/apm/public/components/app/Home/index.tsx b/x-pack/plugins/apm/public/components/app/Home/index.tsx
index f612ac0d383ef..bcc834fef6a6a 100644
--- a/x-pack/plugins/apm/public/components/app/Home/index.tsx
+++ b/x-pack/plugins/apm/public/components/app/Home/index.tsx
@@ -20,6 +20,7 @@ import { EuiTabLink } from '../../shared/EuiTabLink';
import { ServiceMapLink } from '../../shared/Links/apm/ServiceMapLink';
import { ServiceOverviewLink } from '../../shared/Links/apm/ServiceOverviewLink';
import { SettingsLink } from '../../shared/Links/apm/SettingsLink';
+import { AnomalyDetectionSetupLink } from '../../shared/Links/apm/AnomalyDetectionSetupLink';
import { TraceOverviewLink } from '../../shared/Links/apm/TraceOverviewLink';
import { SetupInstructionsLink } from '../../shared/Links/SetupInstructionsLink';
import { ServiceMap } from '../ServiceMap';
@@ -118,6 +119,9 @@ export function Home({ tab }: Props) {
+
+
+
diff --git a/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/add_environments.tsx b/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/add_environments.tsx
index 2da3c12563104..4c056d48f4b14 100644
--- a/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/add_environments.tsx
+++ b/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/add_environments.tsx
@@ -22,7 +22,7 @@ import { i18n } from '@kbn/i18n';
import { useFetcher, FETCH_STATUS } from '../../../../hooks/useFetcher';
import { useApmPluginContext } from '../../../../hooks/useApmPluginContext';
import { createJobs } from './create_jobs';
-import { ENVIRONMENT_NOT_DEFINED } from '../../../../../common/environment_filter_values';
+import { getEnvironmentLabel } from '../../../../../common/environment_filter_values';
interface Props {
currentEnvironments: string[];
@@ -45,11 +45,13 @@ export const AddEnvironments = ({
);
const environmentOptions = data.map((env) => ({
- label: env === ENVIRONMENT_NOT_DEFINED ? NOT_DEFINED_OPTION_LABEL : env,
+ label: getEnvironmentLabel(env),
value: env,
disabled: currentEnvironments.includes(env),
}));
+ const [isSaving, setIsSaving] = useState(false);
+
const [selectedOptions, setSelected] = useState<
Array>
>([]);
@@ -127,9 +129,12 @@ export const AddEnvironments = ({
{
+ setIsSaving(true);
+
const selectedEnvironments = selectedOptions.map(
({ value }) => value as string
);
@@ -140,6 +145,7 @@ export const AddEnvironments = ({
if (success) {
onCreateJobSuccess();
}
+ setIsSaving(false);
}}
>
{i18n.translate(
@@ -155,10 +161,3 @@ export const AddEnvironments = ({
);
};
-
-const NOT_DEFINED_OPTION_LABEL = i18n.translate(
- 'xpack.apm.filter.environment.notDefinedLabel',
- {
- defaultMessage: 'Not defined',
- }
-);
diff --git a/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/index.tsx b/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/index.tsx
index 6f985d06dba9d..f02350fafbabb 100644
--- a/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/index.tsx
+++ b/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/index.tsx
@@ -15,7 +15,11 @@ import { LicensePrompt } from '../../../shared/LicensePrompt';
import { useLicense } from '../../../../hooks/useLicense';
import { APIReturnType } from '../../../../services/rest/createCallApmApi';
-const DEFAULT_VALUE: APIReturnType<'/api/apm/settings/anomaly-detection'> = {
+export type AnomalyDetectionApiResponse = APIReturnType<
+ '/api/apm/settings/anomaly-detection'
+>;
+
+const DEFAULT_VALUE: AnomalyDetectionApiResponse = {
jobs: [],
hasLegacyJobs: false,
};
@@ -80,7 +84,7 @@ export const AnomalyDetection = () => {
) : (
{
setViewAddEnvironments(true);
diff --git a/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/jobs_list.tsx b/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/jobs_list.tsx
index 83d19aa27ac11..5954b82f3b9e7 100644
--- a/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/jobs_list.tsx
+++ b/x-pack/plugins/apm/public/components/app/Settings/anomaly_detection/jobs_list.tsx
@@ -19,27 +19,22 @@ import { FormattedMessage } from '@kbn/i18n/react';
import { FETCH_STATUS } from '../../../../hooks/useFetcher';
import { ITableColumn, ManagedTable } from '../../../shared/ManagedTable';
import { LoadingStatePrompt } from '../../../shared/LoadingStatePrompt';
-import { AnomalyDetectionJobByEnv } from '../../../../../typings/anomaly_detection';
import { MLJobLink } from '../../../shared/Links/MachineLearningLinks/MLJobLink';
import { MLLink } from '../../../shared/Links/MachineLearningLinks/MLLink';
-import { ENVIRONMENT_NOT_DEFINED } from '../../../../../common/environment_filter_values';
+import { getEnvironmentLabel } from '../../../../../common/environment_filter_values';
import { LegacyJobsCallout } from './legacy_jobs_callout';
+import { AnomalyDetectionApiResponse } from './index';
-const columns: Array> = [
+type Jobs = AnomalyDetectionApiResponse['jobs'];
+
+const columns: Array> = [
{
field: 'environment',
name: i18n.translate(
'xpack.apm.settings.anomalyDetection.jobList.environmentColumnLabel',
{ defaultMessage: 'Environment' }
),
- render: (environment: string) => {
- if (environment === ENVIRONMENT_NOT_DEFINED) {
- return i18n.translate('xpack.apm.filter.environment.notDefinedLabel', {
- defaultMessage: 'Not defined',
- });
- }
- return environment;
- },
+ render: getEnvironmentLabel,
},
{
field: 'job_id',
@@ -64,13 +59,13 @@ const columns: Array> = [
interface Props {
status: FETCH_STATUS;
onAddEnvironments: () => void;
- anomalyDetectionJobsByEnv: AnomalyDetectionJobByEnv[];
+ jobs: Jobs;
hasLegacyJobs: boolean;
}
export const JobsList = ({
status,
onAddEnvironments,
- anomalyDetectionJobsByEnv,
+ jobs,
hasLegacyJobs,
}: Props) => {
const isLoading =
@@ -135,7 +130,7 @@ export const JobsList = ({
)
}
columns={columns}
- items={isLoading || hasFetchFailure ? [] : anomalyDetectionJobsByEnv}
+ items={jobs}
/>
diff --git a/x-pack/plugins/apm/public/components/app/TransactionDetails/index.tsx b/x-pack/plugins/apm/public/components/app/TransactionDetails/index.tsx
index 620ae6708eda0..c56b7b9aaa720 100644
--- a/x-pack/plugins/apm/public/components/app/TransactionDetails/index.tsx
+++ b/x-pack/plugins/apm/public/components/app/TransactionDetails/index.tsx
@@ -89,7 +89,6 @@ export function TransactionDetails() {
+ callApmApi({ pathname: `/api/apm/settings/anomaly-detection` }),
+ [],
+ { preservePreviousData: false }
+ );
+ const isFetchSuccess = status === FETCH_STATUS.SUCCESS;
+
+ // Show alert if there are no jobs OR if no job matches the current environment
+ const showAlert =
+ isFetchSuccess && !data.jobs.some((job) => environment === job.environment);
+
+ return (
+
+
+ {ANOMALY_DETECTION_LINK_LABEL}
+
+ {showAlert && (
+
+
+
+ )}
+
+ );
+}
+
+function getTooltipText(environment?: string) {
+ if (!environment) {
+ return i18n.translate('xpack.apm.anomalyDetectionSetup.notEnabledText', {
+ defaultMessage: `Anomaly detection is not yet enabled. Click to continue setup.`,
+ });
+ }
+
+ return i18n.translate(
+ 'xpack.apm.anomalyDetectionSetup.notEnabledForEnvironmentText',
+ {
+ defaultMessage: `Anomaly detection is not yet enabled for the "{currentEnvironment}" environment. Click to continue setup.`,
+ values: { currentEnvironment: getEnvironmentLabel(environment) },
+ }
+ );
+}
+
+const ANOMALY_DETECTION_LINK_LABEL = i18n.translate(
+ 'xpack.apm.anomalyDetectionSetup.linkLabel',
+ { defaultMessage: `Anomaly detection` }
+);
diff --git a/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx b/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx
index 00ff6f9969725..1f80dbf5f4d95 100644
--- a/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx
+++ b/x-pack/plugins/apm/public/components/shared/charts/TransactionCharts/index.tsx
@@ -42,7 +42,6 @@ import {
} from '../../../../../common/transaction_types';
interface TransactionChartProps {
- hasMLJob: boolean;
charts: ITransactionChartData;
location: Location;
urlParams: IUrlParams;
@@ -96,18 +95,17 @@ export class TransactionCharts extends Component {
};
public renderMLHeader(hasValidMlLicense: boolean | undefined) {
- const { hasMLJob } = this.props;
- if (!hasValidMlLicense || !hasMLJob) {
+ const { mlJobId } = this.props.charts;
+
+ if (!hasValidMlLicense || !mlJobId) {
return null;
}
- const { serviceName, kuery } = this.props.urlParams;
+ const { serviceName, kuery, transactionType } = this.props.urlParams;
if (!serviceName) {
return null;
}
- const linkedJobId = ''; // TODO [APM ML] link to ML job id for the selected environment
-
const hasKuery = !isEmpty(kuery);
const icon = hasKuery ? (
{
}
)}{' '}
- View Job
+
+ View Job
+
);
diff --git a/x-pack/plugins/apm/public/selectors/chartSelectors.ts b/x-pack/plugins/apm/public/selectors/chartSelectors.ts
index 714d62a703f51..26c2365ed77e1 100644
--- a/x-pack/plugins/apm/public/selectors/chartSelectors.ts
+++ b/x-pack/plugins/apm/public/selectors/chartSelectors.ts
@@ -33,6 +33,7 @@ export interface ITpmBucket {
export interface ITransactionChartData {
tpmSeries: ITpmBucket[];
responseTimeSeries: TimeSeries[];
+ mlJobId: string | undefined;
}
const INITIAL_DATA = {
@@ -62,6 +63,7 @@ export function getTransactionCharts(
return {
tpmSeries,
responseTimeSeries,
+ mlJobId: anomalyTimeseries?.jobId,
};
}
diff --git a/x-pack/plugins/apm/server/lib/anomaly_detection/create_anomaly_detection_jobs.ts b/x-pack/plugins/apm/server/lib/anomaly_detection/create_anomaly_detection_jobs.ts
index e723393a24013..c387c5152b1c5 100644
--- a/x-pack/plugins/apm/server/lib/anomaly_detection/create_anomaly_detection_jobs.ts
+++ b/x-pack/plugins/apm/server/lib/anomaly_detection/create_anomaly_detection_jobs.ts
@@ -10,12 +10,11 @@ import { snakeCase } from 'lodash';
import { PromiseReturnType } from '../../../../observability/typings/common';
import { Setup } from '../helpers/setup_request';
import {
- SERVICE_ENVIRONMENT,
TRANSACTION_DURATION,
PROCESSOR_EVENT,
} from '../../../common/elasticsearch_fieldnames';
-import { ENVIRONMENT_NOT_DEFINED } from '../../../common/environment_filter_values';
import { APM_ML_JOB_GROUP, ML_MODULE_ID_APM_TRANSACTION } from './constants';
+import { getEnvironmentUiFilterES } from '../helpers/convert_ui_filters/get_environment_ui_filter_es';
export type CreateAnomalyDetectionJobsAPIResponse = PromiseReturnType<
typeof createAnomalyDetectionJobs
@@ -89,9 +88,7 @@ async function createAnomalyDetectionJob({
filter: [
{ term: { [PROCESSOR_EVENT]: 'transaction' } },
{ exists: { field: TRANSACTION_DURATION } },
- environment === ENVIRONMENT_NOT_DEFINED
- ? ENVIRONMENT_NOT_DEFINED_FILTER
- : { term: { [SERVICE_ENVIRONMENT]: environment } },
+ ...getEnvironmentUiFilterES(environment),
],
},
},
@@ -109,13 +106,3 @@ async function createAnomalyDetectionJob({
],
});
}
-
-const ENVIRONMENT_NOT_DEFINED_FILTER = {
- bool: {
- must_not: {
- exists: {
- field: SERVICE_ENVIRONMENT,
- },
- },
- },
-};
diff --git a/x-pack/plugins/apm/server/lib/anomaly_detection/get_anomaly_detection_jobs.ts b/x-pack/plugins/apm/server/lib/anomaly_detection/get_anomaly_detection_jobs.ts
index 8fdebeb597eaf..13b30f159eed1 100644
--- a/x-pack/plugins/apm/server/lib/anomaly_detection/get_anomaly_detection_jobs.ts
+++ b/x-pack/plugins/apm/server/lib/anomaly_detection/get_anomaly_detection_jobs.ts
@@ -6,7 +6,7 @@
import { Logger } from 'kibana/server';
import { Setup } from '../helpers/setup_request';
-import { getMlJobsWithAPMGroup } from './get_ml_jobs_by_group';
+import { getMlJobsWithAPMGroup } from './get_ml_jobs_with_apm_group';
export async function getAnomalyDetectionJobs(setup: Setup, logger: Logger) {
const { ml } = setup;
diff --git a/x-pack/plugins/apm/server/lib/anomaly_detection/get_ml_jobs_by_group.ts b/x-pack/plugins/apm/server/lib/anomaly_detection/get_ml_jobs_with_apm_group.ts
similarity index 100%
rename from x-pack/plugins/apm/server/lib/anomaly_detection/get_ml_jobs_by_group.ts
rename to x-pack/plugins/apm/server/lib/anomaly_detection/get_ml_jobs_with_apm_group.ts
diff --git a/x-pack/plugins/apm/server/lib/anomaly_detection/has_legacy_jobs.ts b/x-pack/plugins/apm/server/lib/anomaly_detection/has_legacy_jobs.ts
index bf502607fcc1d..999d28309121a 100644
--- a/x-pack/plugins/apm/server/lib/anomaly_detection/has_legacy_jobs.ts
+++ b/x-pack/plugins/apm/server/lib/anomaly_detection/has_legacy_jobs.ts
@@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { Setup } from '../helpers/setup_request';
-import { getMlJobsWithAPMGroup } from './get_ml_jobs_by_group';
+import { getMlJobsWithAPMGroup } from './get_ml_jobs_with_apm_group';
// Determine whether there are any legacy ml jobs.
// A legacy ML job has a job id that ends with "high_mean_response_time" and created_by=ml-module-apm-transaction
diff --git a/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/__test__/get_environment_ui_filter_es.test.ts b/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/__test__/get_environment_ui_filter_es.test.ts
index 0f0a11a868d6d..800f809727eb6 100644
--- a/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/__test__/get_environment_ui_filter_es.test.ts
+++ b/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/__test__/get_environment_ui_filter_es.test.ts
@@ -7,24 +7,23 @@
import { getEnvironmentUiFilterES } from '../get_environment_ui_filter_es';
import { ENVIRONMENT_NOT_DEFINED } from '../../../../../common/environment_filter_values';
import { SERVICE_ENVIRONMENT } from '../../../../../common/elasticsearch_fieldnames';
-import { ESFilter } from '../../../../../typings/elasticsearch';
describe('getEnvironmentUiFilterES', () => {
- it('should return undefined, when environment is undefined', () => {
+ it('should return empty array, when environment is undefined', () => {
const uiFilterES = getEnvironmentUiFilterES();
- expect(uiFilterES).toBeUndefined();
+ expect(uiFilterES).toHaveLength(0);
});
it('should create a filter for a service environment', () => {
- const uiFilterES = getEnvironmentUiFilterES('test') as ESFilter;
- expect(uiFilterES).toHaveProperty(['term', SERVICE_ENVIRONMENT], 'test');
+ const uiFilterES = getEnvironmentUiFilterES('test');
+ expect(uiFilterES).toHaveLength(1);
+ expect(uiFilterES[0]).toHaveProperty(['term', SERVICE_ENVIRONMENT], 'test');
});
it('should create a filter for missing service environments', () => {
- const uiFilterES = getEnvironmentUiFilterES(
- ENVIRONMENT_NOT_DEFINED
- ) as ESFilter;
- expect(uiFilterES).toHaveProperty(
+ const uiFilterES = getEnvironmentUiFilterES(ENVIRONMENT_NOT_DEFINED);
+ expect(uiFilterES).toHaveLength(1);
+ expect(uiFilterES[0]).toHaveProperty(
['bool', 'must_not', 'exists', 'field'],
SERVICE_ENVIRONMENT
);
diff --git a/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_environment_ui_filter_es.ts b/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_environment_ui_filter_es.ts
index 63d222a7fcb6e..87bc8dc968373 100644
--- a/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_environment_ui_filter_es.ts
+++ b/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_environment_ui_filter_es.ts
@@ -8,19 +8,12 @@ import { ESFilter } from '../../../../typings/elasticsearch';
import { ENVIRONMENT_NOT_DEFINED } from '../../../../common/environment_filter_values';
import { SERVICE_ENVIRONMENT } from '../../../../common/elasticsearch_fieldnames';
-export function getEnvironmentUiFilterES(
- environment?: string
-): ESFilter | undefined {
+export function getEnvironmentUiFilterES(environment?: string): ESFilter[] {
if (!environment) {
- return undefined;
+ return [];
}
-
if (environment === ENVIRONMENT_NOT_DEFINED) {
- return {
- bool: { must_not: { exists: { field: SERVICE_ENVIRONMENT } } },
- };
+ return [{ bool: { must_not: { exists: { field: SERVICE_ENVIRONMENT } } } }];
}
- return {
- term: { [SERVICE_ENVIRONMENT]: environment },
- };
+ return [{ term: { [SERVICE_ENVIRONMENT]: environment } }];
}
diff --git a/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_ui_filters_es.ts b/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_ui_filters_es.ts
index b34d5535d58cc..c1405b44f2a8a 100644
--- a/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_ui_filters_es.ts
+++ b/x-pack/plugins/apm/server/lib/helpers/convert_ui_filters/get_ui_filters_es.ts
@@ -27,22 +27,19 @@ export function getUiFiltersES(uiFilters: UIFilters) {
};
}) as ESFilter[];
- // remove undefined items from list
const esFilters = [
- getKueryUiFilterES(uiFilters.kuery),
- getEnvironmentUiFilterES(uiFilters.environment),
- ]
- .filter((filter) => !!filter)
- .concat(mappedFilters) as ESFilter[];
+ ...getKueryUiFilterES(uiFilters.kuery),
+ ...getEnvironmentUiFilterES(uiFilters.environment),
+ ].concat(mappedFilters) as ESFilter[];
return esFilters;
}
function getKueryUiFilterES(kuery?: string) {
if (!kuery) {
- return;
+ return [];
}
const ast = esKuery.fromKueryExpression(kuery);
- return esKuery.toElasticsearchQuery(ast) as ESFilter;
+ return [esKuery.toElasticsearchQuery(ast) as ESFilter];
}
diff --git a/x-pack/plugins/apm/server/lib/service_map/get_service_map_service_node_info.ts b/x-pack/plugins/apm/server/lib/service_map/get_service_map_service_node_info.ts
index be92bfe5a0099..dd5d19b620c51 100644
--- a/x-pack/plugins/apm/server/lib/service_map/get_service_map_service_node_info.ts
+++ b/x-pack/plugins/apm/server/lib/service_map/get_service_map_service_node_info.ts
@@ -9,7 +9,6 @@ import { ESFilter } from '../../../typings/elasticsearch';
import { rangeFilter } from '../../../common/utils/range_filter';
import {
PROCESSOR_EVENT,
- SERVICE_ENVIRONMENT,
SERVICE_NAME,
TRANSACTION_DURATION,
TRANSACTION_TYPE,
@@ -22,7 +21,7 @@ import {
TRANSACTION_REQUEST,
TRANSACTION_PAGE_LOAD,
} from '../../../common/transaction_types';
-import { ENVIRONMENT_NOT_DEFINED } from '../../../common/environment_filter_values';
+import { getEnvironmentUiFilterES } from '../helpers/convert_ui_filters/get_environment_ui_filter_es';
interface Options {
setup: Setup & SetupTimeRange;
@@ -43,30 +42,14 @@ export async function getServiceMapServiceNodeInfo({
}: Options & { serviceName: string; environment?: string }) {
const { start, end } = setup;
- const environmentNotDefinedFilter = {
- bool: { must_not: [{ exists: { field: SERVICE_ENVIRONMENT } }] },
- };
-
const filter: ESFilter[] = [
{ range: rangeFilter(start, end) },
{ term: { [SERVICE_NAME]: serviceName } },
+ ...getEnvironmentUiFilterES(environment),
];
- if (environment) {
- filter.push(
- environment === ENVIRONMENT_NOT_DEFINED
- ? environmentNotDefinedFilter
- : { term: { [SERVICE_ENVIRONMENT]: environment } }
- );
- }
-
const minutes = Math.abs((end - start) / (1000 * 60));
-
- const taskParams = {
- setup,
- minutes,
- filter,
- };
+ const taskParams = { setup, minutes, filter };
const [
errorMetrics,
@@ -97,11 +80,7 @@ async function getErrorMetrics({ setup, minutes, filter }: TaskParameters) {
size: 0,
query: {
bool: {
- filter: filter.concat({
- term: {
- [PROCESSOR_EVENT]: 'error',
- },
- }),
+ filter: filter.concat({ term: { [PROCESSOR_EVENT]: 'error' } }),
},
},
track_total_hits: true,
@@ -134,11 +113,7 @@ async function getTransactionStats({
bool: {
filter: [
...filter,
- {
- term: {
- [PROCESSOR_EVENT]: 'transaction',
- },
- },
+ { term: { [PROCESSOR_EVENT]: 'transaction' } },
{
terms: {
[TRANSACTION_TYPE]: [
@@ -151,13 +126,7 @@ async function getTransactionStats({
},
},
track_total_hits: true,
- aggs: {
- duration: {
- avg: {
- field: TRANSACTION_DURATION,
- },
- },
- },
+ aggs: { duration: { avg: { field: TRANSACTION_DURATION } } },
},
};
const response = await client.search(params);
@@ -181,32 +150,16 @@ async function getCpuMetrics({
query: {
bool: {
filter: filter.concat([
- {
- term: {
- [PROCESSOR_EVENT]: 'metric',
- },
- },
- {
- exists: {
- field: METRIC_SYSTEM_CPU_PERCENT,
- },
- },
+ { term: { [PROCESSOR_EVENT]: 'metric' } },
+ { exists: { field: METRIC_SYSTEM_CPU_PERCENT } },
]),
},
},
- aggs: {
- avgCpuUsage: {
- avg: {
- field: METRIC_SYSTEM_CPU_PERCENT,
- },
- },
- },
+ aggs: { avgCpuUsage: { avg: { field: METRIC_SYSTEM_CPU_PERCENT } } },
},
});
- return {
- avgCpuUsage: response.aggregations?.avgCpuUsage.value ?? null,
- };
+ return { avgCpuUsage: response.aggregations?.avgCpuUsage.value ?? null };
}
async function getMemoryMetrics({
@@ -220,31 +173,13 @@ async function getMemoryMetrics({
query: {
bool: {
filter: filter.concat([
- {
- term: {
- [PROCESSOR_EVENT]: 'metric',
- },
- },
- {
- exists: {
- field: METRIC_SYSTEM_FREE_MEMORY,
- },
- },
- {
- exists: {
- field: METRIC_SYSTEM_TOTAL_MEMORY,
- },
- },
+ { term: { [PROCESSOR_EVENT]: 'metric' } },
+ { exists: { field: METRIC_SYSTEM_FREE_MEMORY } },
+ { exists: { field: METRIC_SYSTEM_TOTAL_MEMORY } },
]),
},
},
- aggs: {
- avgMemoryUsage: {
- avg: {
- script: percentMemoryUsedScript,
- },
- },
- },
+ aggs: { avgMemoryUsage: { avg: { script: percentMemoryUsedScript } } },
},
});
diff --git a/x-pack/plugins/apm/server/lib/services/annotations/get_derived_service_annotations.ts b/x-pack/plugins/apm/server/lib/services/annotations/get_derived_service_annotations.ts
index 6da5d195cf194..6a8aaf8dca8a6 100644
--- a/x-pack/plugins/apm/server/lib/services/annotations/get_derived_service_annotations.ts
+++ b/x-pack/plugins/apm/server/lib/services/annotations/get_derived_service_annotations.ts
@@ -29,14 +29,9 @@ export async function getDerivedServiceAnnotations({
const filter: ESFilter[] = [
{ term: { [PROCESSOR_EVENT]: 'transaction' } },
{ term: { [SERVICE_NAME]: serviceName } },
+ ...getEnvironmentUiFilterES(environment),
];
- const environmentFilter = getEnvironmentUiFilterES(environment);
-
- if (environmentFilter) {
- filter.push(environmentFilter);
- }
-
const versions =
(
await client.search({
diff --git a/x-pack/plugins/apm/server/lib/services/annotations/get_stored_annotations.ts b/x-pack/plugins/apm/server/lib/services/annotations/get_stored_annotations.ts
index 75aeb27ea2122..6e3ae0181ddee 100644
--- a/x-pack/plugins/apm/server/lib/services/annotations/get_stored_annotations.ts
+++ b/x-pack/plugins/apm/server/lib/services/annotations/get_stored_annotations.ts
@@ -29,8 +29,6 @@ export async function getStoredAnnotations({
logger: Logger;
}): Promise {
try {
- const environmentFilter = getEnvironmentUiFilterES(environment);
-
const response: ESSearchResponse = (await apiCaller(
'search',
{
@@ -51,7 +49,7 @@ export async function getStoredAnnotations({
{ term: { 'annotation.type': 'deployment' } },
{ term: { tags: 'apm' } },
{ term: { [SERVICE_NAME]: serviceName } },
- ...(environmentFilter ? [environmentFilter] : []),
+ ...getEnvironmentUiFilterES(environment),
],
},
},
diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.ts
new file mode 100644
index 0000000000000..3cf9a54e3fe9b
--- /dev/null
+++ b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/fetcher.ts
@@ -0,0 +1,93 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { Logger } from 'kibana/server';
+import { PromiseReturnType } from '../../../../../../observability/typings/common';
+import { Setup, SetupTimeRange } from '../../../helpers/setup_request';
+
+export type ESResponse = Exclude<
+ PromiseReturnType,
+ undefined
+>;
+
+export async function anomalySeriesFetcher({
+ serviceName,
+ transactionType,
+ intervalString,
+ mlBucketSize,
+ setup,
+ jobId,
+ logger,
+}: {
+ serviceName: string;
+ transactionType: string;
+ intervalString: string;
+ mlBucketSize: number;
+ setup: Setup & SetupTimeRange;
+ jobId: string;
+ logger: Logger;
+}) {
+ const { ml, start, end } = setup;
+ if (!ml) {
+ return;
+ }
+
+ // move the start back by one bucket size to ensure we get anomaly data at the beginning;
+ // this is required because ML has a minimum bucket size (default is 900s) so if our buckets are smaller, we might have several null buckets in the beginning
+ const newStart = start - mlBucketSize * 1000;
+
+ const params = {
+ body: {
+ size: 0,
+ query: {
+ bool: {
+ filter: [
+ { term: { job_id: jobId } },
+ { exists: { field: 'bucket_span' } },
+ { term: { result_type: 'model_plot' } },
+ { term: { partition_field_value: serviceName } },
+ { term: { by_field_value: transactionType } },
+ {
+ range: {
+ timestamp: { gte: newStart, lte: end, format: 'epoch_millis' },
+ },
+ },
+ ],
+ },
+ },
+ aggs: {
+ ml_avg_response_times: {
+ date_histogram: {
+ field: 'timestamp',
+ fixed_interval: intervalString,
+ min_doc_count: 0,
+ extended_bounds: { min: newStart, max: end },
+ },
+ aggs: {
+ anomaly_score: { max: { field: 'anomaly_score' } },
+ lower: { min: { field: 'model_lower' } },
+ upper: { max: { field: 'model_upper' } },
+ },
+ },
+ },
+ },
+ };
+
+ try {
+ const response = await ml.mlSystem.mlAnomalySearch(params);
+ return response;
+ } catch (err) {
+ const isHttpError = 'statusCode' in err;
+ if (isHttpError) {
+ logger.info(
+ `Status code "${err.statusCode}" while retrieving ML anomalies for APM`
+ );
+ return;
+ }
+ logger.error('An error occurred while retrieving ML anomalies for APM');
+ logger.error(err);
+ }
+}
diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/get_ml_bucket_size.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/get_ml_bucket_size.ts
new file mode 100644
index 0000000000000..2f5e703251c03
--- /dev/null
+++ b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/get_ml_bucket_size.ts
@@ -0,0 +1,61 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { Logger } from 'kibana/server';
+import { Setup, SetupTimeRange } from '../../../helpers/setup_request';
+
+interface IOptions {
+ setup: Setup & SetupTimeRange;
+ jobId: string;
+ logger: Logger;
+}
+
+interface ESResponse {
+ bucket_span: number;
+}
+
+export async function getMlBucketSize({
+ setup,
+ jobId,
+ logger,
+}: IOptions): Promise {
+ const { ml, start, end } = setup;
+ if (!ml) {
+ return;
+ }
+
+ const params = {
+ body: {
+ _source: 'bucket_span',
+ size: 1,
+ terminate_after: 1,
+ query: {
+ bool: {
+ filter: [
+ { term: { job_id: jobId } },
+ { exists: { field: 'bucket_span' } },
+ {
+ range: {
+ timestamp: { gte: start, lte: end, format: 'epoch_millis' },
+ },
+ },
+ ],
+ },
+ },
+ },
+ };
+
+ try {
+ const resp = await ml.mlSystem.mlAnomalySearch(params);
+ return resp.hits.hits[0]?._source.bucket_span;
+ } catch (err) {
+ const isHttpError = 'statusCode' in err;
+ if (isHttpError) {
+ return;
+ }
+ logger.error(err);
+ }
+}
diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts
index b2d11f2ffe19a..072099bc9553c 100644
--- a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts
+++ b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/index.ts
@@ -3,18 +3,19 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-
+import { Logger } from 'kibana/server';
+import { isNumber } from 'lodash';
+import { getBucketSize } from '../../../helpers/get_bucket_size';
import {
Setup,
SetupTimeRange,
SetupUIFilters,
} from '../../../helpers/setup_request';
-import { Coordinate, RectCoordinate } from '../../../../../typings/timeseries';
-
-interface AnomalyTimeseries {
- anomalyBoundaries: Coordinate[];
- anomalyScore: RectCoordinate[];
-}
+import { anomalySeriesFetcher } from './fetcher';
+import { getMlBucketSize } from './get_ml_bucket_size';
+import { anomalySeriesTransform } from './transform';
+import { getMLJobIds } from '../../../service_map/get_service_anomalies';
+import { UIFilters } from '../../../../../typings/ui_filters';
export async function getAnomalySeries({
serviceName,
@@ -22,13 +23,17 @@ export async function getAnomalySeries({
transactionName,
timeSeriesDates,
setup,
+ logger,
+ uiFilters,
}: {
serviceName: string;
transactionType: string | undefined;
transactionName: string | undefined;
timeSeriesDates: number[];
setup: Setup & SetupTimeRange & SetupUIFilters;
-}): Promise {
+ logger: Logger;
+ uiFilters: UIFilters;
+}) {
// don't fetch anomalies for transaction details page
if (transactionName) {
return;
@@ -39,8 +44,12 @@ export async function getAnomalySeries({
return;
}
- // don't fetch anomalies if uiFilters are applied
- if (setup.uiFiltersES.length > 0) {
+ // don't fetch anomalies if unknown uiFilters are applied
+ const knownFilters = ['environment', 'serviceName'];
+ const uiFilterNames = Object.keys(uiFilters);
+ if (
+ uiFilterNames.some((uiFilterName) => !knownFilters.includes(uiFilterName))
+ ) {
return;
}
@@ -55,6 +64,45 @@ export async function getAnomalySeries({
return;
}
- // TODO [APM ML] return a series of anomaly scores, upper & lower bounds for the given timeSeriesDates
- return;
+ let mlJobIds: string[] = [];
+ try {
+ mlJobIds = await getMLJobIds(setup.ml, uiFilters.environment);
+ } catch (error) {
+ logger.error(error);
+ return;
+ }
+
+ // don't fetch anomalies unless there is exactly 1 ML job matching the given environment
+ if (mlJobIds.length !== 1) {
+ return;
+ }
+ const jobId = mlJobIds[0];
+
+ const mlBucketSize = await getMlBucketSize({ setup, jobId, logger });
+ if (!isNumber(mlBucketSize)) {
+ return;
+ }
+
+ const { start, end } = setup;
+ const { intervalString, bucketSize } = getBucketSize(start, end, 'auto');
+
+ const esResponse = await anomalySeriesFetcher({
+ serviceName,
+ transactionType,
+ intervalString,
+ mlBucketSize,
+ setup,
+ jobId,
+ logger,
+ });
+
+ if (esResponse && mlBucketSize > 0) {
+ return anomalySeriesTransform(
+ esResponse,
+ mlBucketSize,
+ bucketSize,
+ timeSeriesDates,
+ jobId
+ );
+ }
}
diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.ts b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.ts
new file mode 100644
index 0000000000000..393a73f7c1ccd
--- /dev/null
+++ b/x-pack/plugins/apm/server/lib/transactions/charts/get_anomaly_data/transform.ts
@@ -0,0 +1,136 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { first, last } from 'lodash';
+import { Coordinate, RectCoordinate } from '../../../../../typings/timeseries';
+import { ESResponse } from './fetcher';
+
+type IBucket = ReturnType;
+function getBucket(
+ bucket: Required<
+ ESResponse
+ >['aggregations']['ml_avg_response_times']['buckets'][0]
+) {
+ return {
+ x: bucket.key,
+ anomalyScore: bucket.anomaly_score.value,
+ lower: bucket.lower.value,
+ upper: bucket.upper.value,
+ };
+}
+
+export type AnomalyTimeSeriesResponse = ReturnType<
+ typeof anomalySeriesTransform
+>;
+export function anomalySeriesTransform(
+ response: ESResponse,
+ mlBucketSize: number,
+ bucketSize: number,
+ timeSeriesDates: number[],
+ jobId: string
+) {
+ const buckets =
+ response.aggregations?.ml_avg_response_times.buckets.map(getBucket) || [];
+
+ const bucketSizeInMillis = Math.max(bucketSize, mlBucketSize) * 1000;
+
+ return {
+ jobId,
+ anomalyScore: getAnomalyScoreDataPoints(
+ buckets,
+ timeSeriesDates,
+ bucketSizeInMillis
+ ),
+ anomalyBoundaries: getAnomalyBoundaryDataPoints(buckets, timeSeriesDates),
+ };
+}
+
+export function getAnomalyScoreDataPoints(
+ buckets: IBucket[],
+ timeSeriesDates: number[],
+ bucketSizeInMillis: number
+): RectCoordinate[] {
+ const ANOMALY_THRESHOLD = 75;
+ const firstDate = first(timeSeriesDates);
+ const lastDate = last(timeSeriesDates);
+
+ if (firstDate === undefined || lastDate === undefined) {
+ return [];
+ }
+
+ return buckets
+ .filter(
+ (bucket) =>
+ bucket.anomalyScore !== null && bucket.anomalyScore > ANOMALY_THRESHOLD
+ )
+ .filter(isInDateRange(firstDate, lastDate))
+ .map((bucket) => {
+ return {
+ x0: bucket.x,
+ x: Math.min(bucket.x + bucketSizeInMillis, lastDate), // don't go beyond last date
+ };
+ });
+}
+
+export function getAnomalyBoundaryDataPoints(
+ buckets: IBucket[],
+ timeSeriesDates: number[]
+): Coordinate[] {
+ return replaceFirstAndLastBucket(buckets, timeSeriesDates)
+ .filter((bucket) => bucket.lower !== null)
+ .map((bucket) => {
+ return {
+ x: bucket.x,
+ y0: bucket.lower,
+ y: bucket.upper,
+ };
+ });
+}
+
+export function replaceFirstAndLastBucket(
+ buckets: IBucket[],
+ timeSeriesDates: number[]
+) {
+ const firstDate = first(timeSeriesDates);
+ const lastDate = last(timeSeriesDates);
+
+ if (firstDate === undefined || lastDate === undefined) {
+ return buckets;
+ }
+
+ const preBucketWithValue = buckets
+ .filter((p) => p.x <= firstDate)
+ .reverse()
+ .find((p) => p.lower !== null);
+
+ const bucketsInRange = buckets.filter(isInDateRange(firstDate, lastDate));
+
+ // replace first bucket if it is null
+ const firstBucket = first(bucketsInRange);
+ if (preBucketWithValue && firstBucket && firstBucket.lower === null) {
+ firstBucket.lower = preBucketWithValue.lower;
+ firstBucket.upper = preBucketWithValue.upper;
+ }
+
+ const lastBucketWithValue = [...buckets]
+ .reverse()
+ .find((p) => p.lower !== null);
+
+ // replace last bucket if it is null
+ const lastBucket = last(bucketsInRange);
+ if (lastBucketWithValue && lastBucket && lastBucket.lower === null) {
+ lastBucket.lower = lastBucketWithValue.lower;
+ lastBucket.upper = lastBucketWithValue.upper;
+ }
+
+ return bucketsInRange;
+}
+
+// anomaly time series contain one or more extra buckets at the beginning;
+// these extra buckets should be removed
+function isInDateRange(firstDate: number, lastDate: number) {
+ return (p: IBucket) => p.x >= firstDate && p.x <= lastDate;
+}
diff --git a/x-pack/plugins/apm/server/lib/transactions/charts/index.ts b/x-pack/plugins/apm/server/lib/transactions/charts/index.ts
index 2ec049002d605..e862982145f77 100644
--- a/x-pack/plugins/apm/server/lib/transactions/charts/index.ts
+++ b/x-pack/plugins/apm/server/lib/transactions/charts/index.ts
@@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { Logger } from 'kibana/server';
import { PromiseReturnType } from '../../../../../observability/typings/common';
import {
Setup,
@@ -13,6 +14,7 @@ import {
import { getAnomalySeries } from './get_anomaly_data';
import { getApmTimeseriesData } from './get_timeseries_data';
import { ApmTimeSeriesResponse } from './get_timeseries_data/transform';
+import { UIFilters } from '../../../../typings/ui_filters';
function getDates(apmTimeseries: ApmTimeSeriesResponse) {
return apmTimeseries.responseTimes.avg.map((p) => p.x);
@@ -26,6 +28,8 @@ export async function getTransactionCharts(options: {
transactionType: string | undefined;
transactionName: string | undefined;
setup: Setup & SetupTimeRange & SetupUIFilters;
+ logger: Logger;
+ uiFilters: UIFilters;
}) {
const apmTimeseries = await getApmTimeseriesData(options);
const anomalyTimeseries = await getAnomalySeries({
diff --git a/x-pack/plugins/apm/server/lib/transactions/queries.test.ts b/x-pack/plugins/apm/server/lib/transactions/queries.test.ts
index 713635cff2fbf..586fa1798b7bc 100644
--- a/x-pack/plugins/apm/server/lib/transactions/queries.test.ts
+++ b/x-pack/plugins/apm/server/lib/transactions/queries.test.ts
@@ -12,6 +12,8 @@ import {
SearchParamsMock,
inspectSearchParams,
} from '../../../public/utils/testHelpers';
+// eslint-disable-next-line @kbn/eslint/no-restricted-paths
+import { loggerMock } from '../../../../../../src/core/server/logging/logger.mock';
describe('transaction queries', () => {
let mock: SearchParamsMock;
@@ -52,6 +54,8 @@ describe('transaction queries', () => {
transactionName: undefined,
transactionType: undefined,
setup,
+ logger: loggerMock.create(),
+ uiFilters: {},
})
);
expect(mock.params).toMatchSnapshot();
@@ -64,6 +68,8 @@ describe('transaction queries', () => {
transactionName: 'bar',
transactionType: undefined,
setup,
+ logger: loggerMock.create(),
+ uiFilters: {},
})
);
expect(mock.params).toMatchSnapshot();
@@ -76,6 +82,8 @@ describe('transaction queries', () => {
transactionName: 'bar',
transactionType: 'baz',
setup,
+ logger: loggerMock.create(),
+ uiFilters: {},
})
);
diff --git a/x-pack/plugins/apm/server/routes/settings/anomaly_detection.ts b/x-pack/plugins/apm/server/routes/settings/anomaly_detection.ts
index 7009470e1ff17..4d564b773e397 100644
--- a/x-pack/plugins/apm/server/routes/settings/anomaly_detection.ts
+++ b/x-pack/plugins/apm/server/routes/settings/anomaly_detection.ts
@@ -18,10 +18,13 @@ export const anomalyDetectionJobsRoute = createRoute(() => ({
path: '/api/apm/settings/anomaly-detection',
handler: async ({ context, request }) => {
const setup = await setupRequest(context, request);
- const jobs = await getAnomalyDetectionJobs(setup, context.logger);
+ const [jobs, legacyJobs] = await Promise.all([
+ getAnomalyDetectionJobs(setup, context.logger),
+ hasLegacyJobs(setup),
+ ]);
return {
jobs,
- hasLegacyJobs: await hasLegacyJobs(setup),
+ hasLegacyJobs: legacyJobs,
};
},
}));
diff --git a/x-pack/plugins/apm/server/routes/transaction_groups.ts b/x-pack/plugins/apm/server/routes/transaction_groups.ts
index 9ad281159fca5..3d939b04795c6 100644
--- a/x-pack/plugins/apm/server/routes/transaction_groups.ts
+++ b/x-pack/plugins/apm/server/routes/transaction_groups.ts
@@ -14,6 +14,7 @@ import { createRoute } from './create_route';
import { uiFiltersRt, rangeRt } from './default_api_types';
import { getTransactionAvgDurationByBrowser } from '../lib/transactions/avg_duration_by_browser';
import { getTransactionAvgDurationByCountry } from '../lib/transactions/avg_duration_by_country';
+import { UIFilters } from '../../typings/ui_filters';
export const transactionGroupsRoute = createRoute(() => ({
path: '/api/apm/services/{serviceName}/transaction_groups',
@@ -62,14 +63,27 @@ export const transactionGroupsChartsRoute = createRoute(() => ({
},
handler: async ({ context, request }) => {
const setup = await setupRequest(context, request);
+ const logger = context.logger;
const { serviceName } = context.params.path;
- const { transactionType, transactionName } = context.params.query;
+ const {
+ transactionType,
+ transactionName,
+ uiFilters: uiFiltersJson,
+ } = context.params.query;
+ let uiFilters: UIFilters = {};
+ try {
+ uiFilters = JSON.parse(uiFiltersJson);
+ } catch (error) {
+ logger.error(error);
+ }
return getTransactionCharts({
serviceName,
transactionType,
transactionName,
setup,
+ logger,
+ uiFilters,
});
},
}));
diff --git a/x-pack/plugins/apm/typings/anomaly_detection.ts b/x-pack/plugins/apm/typings/anomaly_detection.ts
deleted file mode 100644
index 30dc92c36dea4..0000000000000
--- a/x-pack/plugins/apm/typings/anomaly_detection.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-export interface AnomalyDetectionJobByEnv {
- environment: string;
- job_id: string;
-}
diff --git a/x-pack/plugins/index_management/common/lib/component_template_serialization.test.ts b/x-pack/plugins/index_management/common/lib/component_template_serialization.test.ts
index 83682f45918e3..16c45991d1f32 100644
--- a/x-pack/plugins/index_management/common/lib/component_template_serialization.test.ts
+++ b/x-pack/plugins/index_management/common/lib/component_template_serialization.test.ts
@@ -92,6 +92,7 @@ describe('Component template serialization', () => {
},
_kbnMeta: {
usedBy: ['my_index_template'],
+ isManaged: false,
},
});
});
@@ -105,6 +106,7 @@ describe('Component template serialization', () => {
version: 1,
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
_meta: {
serialization: {
diff --git a/x-pack/plugins/index_management/common/lib/component_template_serialization.ts b/x-pack/plugins/index_management/common/lib/component_template_serialization.ts
index 672b8140f79fb..3a1c2c1ca55b2 100644
--- a/x-pack/plugins/index_management/common/lib/component_template_serialization.ts
+++ b/x-pack/plugins/index_management/common/lib/component_template_serialization.ts
@@ -60,24 +60,26 @@ export function deserializeComponentTemplate(
_meta,
_kbnMeta: {
usedBy: indexTemplatesToUsedBy[name] || [],
+ isManaged: Boolean(_meta?.managed === true),
},
};
return deserializedComponentTemplate;
}
-export function deserializeComponenTemplateList(
+export function deserializeComponentTemplateList(
componentTemplateEs: ComponentTemplateFromEs,
indexTemplatesEs: TemplateFromEs[]
) {
const { name, component_template: componentTemplate } = componentTemplateEs;
- const { template } = componentTemplate;
+ const { template, _meta } = componentTemplate;
const indexTemplatesToUsedBy = getIndexTemplatesToUsedBy(indexTemplatesEs);
const componentTemplateListItem: ComponentTemplateListItem = {
name,
usedBy: indexTemplatesToUsedBy[name] || [],
+ isManaged: Boolean(_meta?.managed === true),
hasSettings: hasEntries(template.settings),
hasMappings: hasEntries(template.mappings),
hasAliases: hasEntries(template.aliases),
diff --git a/x-pack/plugins/index_management/common/lib/index.ts b/x-pack/plugins/index_management/common/lib/index.ts
index f39cc063ba731..9e87e87b0eee0 100644
--- a/x-pack/plugins/index_management/common/lib/index.ts
+++ b/x-pack/plugins/index_management/common/lib/index.ts
@@ -19,6 +19,6 @@ export { getTemplateParameter } from './utils';
export {
deserializeComponentTemplate,
- deserializeComponenTemplateList,
+ deserializeComponentTemplateList,
serializeComponentTemplate,
} from './component_template_serialization';
diff --git a/x-pack/plugins/index_management/common/types/component_templates.ts b/x-pack/plugins/index_management/common/types/component_templates.ts
index bc7ebdc2753dd..c8dec40d061bd 100644
--- a/x-pack/plugins/index_management/common/types/component_templates.ts
+++ b/x-pack/plugins/index_management/common/types/component_templates.ts
@@ -22,6 +22,7 @@ export interface ComponentTemplateDeserialized extends ComponentTemplateSerializ
name: string;
_kbnMeta: {
usedBy: string[];
+ isManaged: boolean;
};
}
@@ -36,4 +37,5 @@ export interface ComponentTemplateListItem {
hasMappings: boolean;
hasAliases: boolean;
hasSettings: boolean;
+ isManaged: boolean;
}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_create.test.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_create.test.tsx
index 75eb419d56a5c..4462a42758878 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_create.test.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_create.test.tsx
@@ -185,7 +185,7 @@ describe('', () => {
},
aliases: ALIASES,
},
- _kbnMeta: { usedBy: [] },
+ _kbnMeta: { usedBy: [], isManaged: false },
};
expect(JSON.parse(JSON.parse(latestRequest.requestBody).body)).toEqual(expected);
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts
index 7c17dde119c42..3d496d68cc66e 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_details.test.ts
@@ -26,13 +26,13 @@ const COMPONENT_TEMPLATE: ComponentTemplateDeserialized = {
},
version: 1,
_meta: { description: 'component template test' },
- _kbnMeta: { usedBy: ['template_1'] },
+ _kbnMeta: { usedBy: ['template_1'], isManaged: false },
};
const COMPONENT_TEMPLATE_ONLY_REQUIRED_FIELDS: ComponentTemplateDeserialized = {
name: 'comp-base',
template: {},
- _kbnMeta: { usedBy: [] },
+ _kbnMeta: { usedBy: [], isManaged: false },
};
describe('', () => {
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_edit.test.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_edit.test.tsx
index 115fdf032da8f..114cafe9defde 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_edit.test.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_edit.test.tsx
@@ -52,7 +52,7 @@ describe('', () => {
template: {
settings: { number_of_shards: 1 },
},
- _kbnMeta: { usedBy: [] },
+ _kbnMeta: { usedBy: [], isManaged: false },
};
beforeEach(async () => {
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_list.test.ts b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_list.test.ts
index 6f09e51255f3b..bd6ac27375836 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_list.test.ts
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/component_template_list.test.ts
@@ -42,6 +42,7 @@ describe('', () => {
hasAliases: true,
hasSettings: true,
usedBy: [],
+ isManaged: false,
};
const componentTemplate2: ComponentTemplateListItem = {
@@ -50,6 +51,7 @@ describe('', () => {
hasAliases: true,
hasSettings: true,
usedBy: ['test_index_template_1'],
+ isManaged: false,
};
const componentTemplates = [componentTemplate1, componentTemplate2];
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx
index 70634a226c67b..7e460d3855cb0 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/__jest__/client_integration/helpers/setup_environment.tsx
@@ -12,6 +12,7 @@ import { HttpSetup } from 'kibana/public';
import {
notificationServiceMock,
docLinksServiceMock,
+ applicationServiceMock,
} from '../../../../../../../../../../src/core/public/mocks';
import { ComponentTemplatesProvider } from '../../../component_templates_context';
@@ -28,6 +29,7 @@ const appDependencies = {
docLinks: docLinksServiceMock.createStartContract(),
toasts: notificationServiceMock.createSetupContract().toasts,
setBreadcrumbs: () => {},
+ getUrlForApp: applicationServiceMock.createStartContract().getUrlForApp,
};
export const setupEnvironment = () => {
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx
index f94c5c38f23dd..60f1fff3cc9de 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/component_template_details.tsx
@@ -6,6 +6,7 @@
import React, { useState } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
+
import {
EuiFlyout,
EuiFlyoutHeader,
@@ -17,6 +18,7 @@ import {
EuiButtonEmpty,
EuiSpacer,
EuiCallOut,
+ EuiBadge,
} from '@elastic/eui';
import { SectionLoading, TabSettings, TabAliases, TabMappings } from '../shared_imports';
@@ -29,14 +31,15 @@ import { attemptToDecodeURI } from '../lib';
interface Props {
componentTemplateName: string;
onClose: () => void;
- showFooter?: boolean;
actions?: ManageAction[];
+ showSummaryCallToAction?: boolean;
}
export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({
componentTemplateName,
onClose,
actions,
+ showSummaryCallToAction,
}) => {
const { api } = useComponentTemplatesContext();
@@ -81,7 +84,12 @@ export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({
} = componentTemplateDetails;
const tabToComponentMap: Record = {
- summary: ,
+ summary: (
+
+ ),
settings: ,
mappings: ,
aliases: ,
@@ -109,11 +117,27 @@ export const ComponentTemplateDetailsFlyout: React.FunctionComponent = ({
maxWidth={500}
>
-
-
- {decodedComponentTemplateName}
-
-
+
+
+
+
+ {decodedComponentTemplateName}
+
+
+
+
+ {componentTemplateDetails?._kbnMeta.isManaged ? (
+
+ {' '}
+
+
+
+
+ ) : null}
+
{content}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/tab_summary.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/tab_summary.tsx
index 80f28f23c9f91..8d054b97cb4f6 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/tab_summary.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_details/tab_summary.tsx
@@ -6,6 +6,7 @@
import React from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
+
import {
EuiDescriptionList,
EuiDescriptionListTitle,
@@ -14,15 +15,23 @@ import {
EuiTitle,
EuiCallOut,
EuiSpacer,
+ EuiLink,
} from '@elastic/eui';
import { ComponentTemplateDeserialized } from '../shared_imports';
+import { useComponentTemplatesContext } from '../component_templates_context';
interface Props {
componentTemplateDetails: ComponentTemplateDeserialized;
+ showCallToAction?: boolean;
}
-export const TabSummary: React.FunctionComponent = ({ componentTemplateDetails }) => {
+export const TabSummary: React.FunctionComponent = ({
+ componentTemplateDetails,
+ showCallToAction,
+}) => {
+ const { getUrlForApp } = useComponentTemplatesContext();
+
const { version, _meta, _kbnMeta } = componentTemplateDetails;
const { usedBy } = _kbnMeta;
@@ -43,7 +52,42 @@ export const TabSummary: React.FunctionComponent = ({ componentTemplateDe
iconType="pin"
data-test-subj="notInUseCallout"
size="s"
- />
+ >
+ {showCallToAction && (
+
+
+
+
+ ),
+ editLink: (
+
+
+
+ ),
+ }}
+ />
+
+ )}
+
>
)}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx
index d356eabc7997d..efc8b649ef872 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/component_template_list.tsx
@@ -9,6 +9,7 @@ import { RouteComponentProps } from 'react-router-dom';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { ScopedHistory } from 'kibana/public';
+import { EuiLink, EuiText, EuiSpacer } from '@elastic/eui';
import { SectionLoading, ComponentTemplateDeserialized } from '../shared_imports';
import { UIM_COMPONENT_TEMPLATE_LIST_LOAD } from '../constants';
@@ -29,7 +30,7 @@ export const ComponentTemplateList: React.FunctionComponent = ({
componentTemplateName,
history,
}) => {
- const { api, trackMetric } = useComponentTemplatesContext();
+ const { api, trackMetric, documentation } = useComponentTemplatesContext();
const { data, isLoading, error, sendRequest } = api.useLoadComponentTemplates();
@@ -65,20 +66,40 @@ export const ComponentTemplateList: React.FunctionComponent = ({
);
} else if (data?.length) {
content = (
-
+ <>
+
+
+ {i18n.translate('xpack.idxMgmt.componentTemplates.list.learnMoreLinkText', {
+ defaultMessage: 'Learn more.',
+ })}
+
+ ),
+ }}
+ />
+
+
+
+
+
+ >
);
} else if (data && data.length === 0) {
content = ;
@@ -111,6 +132,7 @@ export const ComponentTemplateList: React.FunctionComponent = ({
= ({ history }) => {
{i18n.translate('xpack.idxMgmt.home.componentTemplates.emptyPromptDocumentionLink', {
- defaultMessage: 'Learn more',
+ defaultMessage: 'Learn more.',
})}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/table.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/table.tsx
index 089c2f889e726..fc86609f1217d 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/table.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_list/table.tsx
@@ -13,11 +13,11 @@ import {
EuiTextColor,
EuiIcon,
EuiLink,
+ EuiBadge,
} from '@elastic/eui';
import { ScopedHistory } from 'kibana/public';
-import { reactRouterNavigate } from '../../../../../../../../src/plugins/kibana_react/public';
-import { ComponentTemplateListItem } from '../shared_imports';
+import { ComponentTemplateListItem, reactRouterNavigate } from '../shared_imports';
import { UIM_COMPONENT_TEMPLATE_DETAILS } from '../constants';
import { useComponentTemplatesContext } from '../component_templates_context';
@@ -105,6 +105,13 @@ export const ComponentTable: FunctionComponent = ({
incremental: true,
},
filters: [
+ {
+ type: 'is',
+ field: 'isManaged',
+ name: i18n.translate('xpack.idxMgmt.componentTemplatesList.table.isManagedFilterLabel', {
+ defaultMessage: 'Managed',
+ }),
+ },
{
type: 'field_value_toggle_group',
field: 'usedBy.length',
@@ -144,26 +151,38 @@ export const ComponentTable: FunctionComponent = ({
defaultMessage: 'Name',
}),
sortable: true,
- render: (name: string) => (
- /* eslint-disable-next-line @elastic/eui/href-or-on-click */
- trackMetric('click', UIM_COMPONENT_TEMPLATE_DETAILS)
+ width: '20%',
+ render: (name: string, item: ComponentTemplateListItem) => (
+ <>
+ trackMetric('click', UIM_COMPONENT_TEMPLATE_DETAILS)
+ )}
+ data-test-subj="templateDetailsLink"
+ >
+ {name}
+
+ {item.isManaged && (
+ <>
+
+
+ {i18n.translate('xpack.idxMgmt.componentTemplatesList.table.managedBadgeLabel', {
+ defaultMessage: 'Managed',
+ })}
+
+ >
)}
- data-test-subj="templateDetailsLink"
- >
- {name}
-
+ >
),
},
{
field: 'usedBy',
name: i18n.translate('xpack.idxMgmt.componentTemplatesList.table.isInUseColumnTitle', {
- defaultMessage: 'Index templates',
+ defaultMessage: 'Usage count',
}),
sortable: true,
render: (usedBy: string[]) => {
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/component_template_form.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/component_template_form.tsx
index 6e35fbad31d4e..134b8b5eda93d 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/component_template_form.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/component_template_form.tsx
@@ -74,14 +74,11 @@ const wizardSections: { [id: string]: { id: WizardSection; label: string } } = {
export const ComponentTemplateForm = ({
defaultValue = {
name: '',
- template: {
- settings: {},
- mappings: {},
- aliases: {},
- },
+ template: {},
_meta: {},
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
},
isEditing,
@@ -137,23 +134,49 @@ export const ComponentTemplateForm = ({
>
) : null;
- const buildComponentTemplateObject = (initialTemplate: ComponentTemplateDeserialized) => (
- wizardData: WizardContent
- ): ComponentTemplateDeserialized => {
- const componentTemplate = {
- ...initialTemplate,
- name: wizardData.logistics.name,
- version: wizardData.logistics.version,
- _meta: wizardData.logistics._meta,
- template: {
- settings: wizardData.settings,
- mappings: wizardData.mappings,
- aliases: wizardData.aliases,
- },
- };
- return componentTemplate;
+ /**
+ * If no mappings, settings or aliases are defined, it is better to not send an empty
+ * object for those values.
+ * @param componentTemplate The component template object to clean up
+ */
+ const cleanupComponentTemplateObject = (componentTemplate: ComponentTemplateDeserialized) => {
+ const outputTemplate = { ...componentTemplate };
+
+ if (outputTemplate.template.settings === undefined) {
+ delete outputTemplate.template.settings;
+ }
+
+ if (outputTemplate.template.mappings === undefined) {
+ delete outputTemplate.template.mappings;
+ }
+
+ if (outputTemplate.template.aliases === undefined) {
+ delete outputTemplate.template.aliases;
+ }
+
+ return outputTemplate;
};
+ const buildComponentTemplateObject = useCallback(
+ (initialTemplate: ComponentTemplateDeserialized) => (
+ wizardData: WizardContent
+ ): ComponentTemplateDeserialized => {
+ const outputComponentTemplate = {
+ ...initialTemplate,
+ name: wizardData.logistics.name,
+ version: wizardData.logistics.version,
+ _meta: wizardData.logistics._meta,
+ template: {
+ settings: wizardData.settings,
+ mappings: wizardData.mappings,
+ aliases: wizardData.aliases,
+ },
+ };
+ return cleanupComponentTemplateObject(outputComponentTemplate);
+ },
+ []
+ );
+
const onSaveComponentTemplate = useCallback(
async (wizardData: WizardContent) => {
const componentTemplate = buildComponentTemplateObject(defaultValue)(wizardData);
@@ -161,13 +184,13 @@ export const ComponentTemplateForm = ({
// This will strip an empty string if "version" is not set, as well as an empty "_meta" object
onSave(
stripEmptyFields(componentTemplate, {
- types: ['string', 'object'],
+ types: ['string'],
}) as ComponentTemplateDeserialized
);
clearSaveError();
},
- [defaultValue, onSave, clearSaveError]
+ [buildComponentTemplateObject, defaultValue, onSave, clearSaveError]
);
return (
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_logistics.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_logistics.tsx
index 18988fa125a06..c48a23226a371 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_logistics.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_logistics.tsx
@@ -117,7 +117,7 @@ export const StepLogistics: React.FunctionComponent = React.memo(
description={
}
>
@@ -141,7 +141,7 @@ export const StepLogistics: React.FunctionComponent = React.memo(
description={
}
>
@@ -165,7 +165,7 @@ export const StepLogistics: React.FunctionComponent = React.memo(
<>
= React.memo(
{i18n.translate(
'xpack.idxMgmt.componentTemplateForm.stepLogistics.metaDocumentionLink',
{
- defaultMessage: 'Learn more',
+ defaultMessage: 'Learn more.',
}
)}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_review.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_review.tsx
index ce85854dc79ab..67246f2e10c3b 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_review.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_wizard/component_template_form/steps/step_review.tsx
@@ -52,16 +52,12 @@ export const StepReview: React.FunctionComponent = React.memo(({ componen
const serializedComponentTemplate = serializeComponentTemplate(
stripEmptyFields(componentTemplate, {
- types: ['string', 'object'],
+ types: ['string'],
}) as ComponentTemplateDeserialized
);
const {
- template: {
- mappings: serializedMappings,
- settings: serializedSettings,
- aliases: serializedAliases,
- },
+ template: serializedTemplate,
_meta: serializedMeta,
version: serializedVersion,
} = serializedComponentTemplate;
@@ -94,7 +90,7 @@ export const StepReview: React.FunctionComponent = React.memo(({ componen
/>
- {getDescriptionText(serializedSettings)}
+ {getDescriptionText(serializedTemplate?.settings)}
{/* Mappings */}
@@ -105,7 +101,7 @@ export const StepReview: React.FunctionComponent = React.memo(({ componen
/>
- {getDescriptionText(serializedMappings)}
+ {getDescriptionText(serializedTemplate?.mappings)}
{/* Aliases */}
@@ -116,7 +112,7 @@ export const StepReview: React.FunctionComponent = React.memo(({ componen
/>
- {getDescriptionText(serializedAliases)}
+ {getDescriptionText(serializedTemplate?.aliases)}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_templates_context.tsx b/x-pack/plugins/index_management/public/application/components/component_templates/component_templates_context.tsx
index ce9e28d0feefe..7be0618481a69 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/component_templates_context.tsx
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_templates_context.tsx
@@ -5,7 +5,7 @@
*/
import React, { createContext, useContext } from 'react';
-import { HttpSetup, DocLinksStart, NotificationsSetup } from 'src/core/public';
+import { HttpSetup, DocLinksStart, NotificationsSetup, CoreStart } from 'src/core/public';
import { ManagementAppMountParams } from 'src/plugins/management/public';
import { getApi, getUseRequest, getSendRequest, getDocumentation, getBreadcrumbs } from './lib';
@@ -19,6 +19,7 @@ interface Props {
docLinks: DocLinksStart;
toasts: NotificationsSetup['toasts'];
setBreadcrumbs: ManagementAppMountParams['setBreadcrumbs'];
+ getUrlForApp: CoreStart['application']['getUrlForApp'];
}
interface Context {
@@ -29,6 +30,7 @@ interface Context {
breadcrumbs: ReturnType;
trackMetric: (type: 'loaded' | 'click' | 'count', eventName: string) => void;
toasts: NotificationsSetup['toasts'];
+ getUrlForApp: CoreStart['application']['getUrlForApp'];
}
export const ComponentTemplatesProvider = ({
@@ -38,7 +40,15 @@ export const ComponentTemplatesProvider = ({
value: Props;
children: React.ReactNode;
}) => {
- const { httpClient, apiBasePath, trackMetric, docLinks, toasts, setBreadcrumbs } = value;
+ const {
+ httpClient,
+ apiBasePath,
+ trackMetric,
+ docLinks,
+ toasts,
+ setBreadcrumbs,
+ getUrlForApp,
+ } = value;
const useRequest = getUseRequest(httpClient);
const sendRequest = getSendRequest(httpClient);
@@ -49,7 +59,16 @@ export const ComponentTemplatesProvider = ({
return (
{children}
diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/shared_imports.ts b/x-pack/plugins/index_management/public/application/components/component_templates/shared_imports.ts
index 80e222f4f7706..278fadcd90c8b 100644
--- a/x-pack/plugins/index_management/public/application/components/component_templates/shared_imports.ts
+++ b/x-pack/plugins/index_management/public/application/components/component_templates/shared_imports.ts
@@ -62,3 +62,5 @@ export {
} from '../../../../common';
export { serializeComponentTemplate } from '../../../../common/lib';
+
+export { reactRouterNavigate } from '../../../../../../../src/plugins/kibana_react/public';
diff --git a/x-pack/plugins/index_management/public/application/index.tsx b/x-pack/plugins/index_management/public/application/index.tsx
index 7b053a15b26d0..ebc29ac86a17f 100644
--- a/x-pack/plugins/index_management/public/application/index.tsx
+++ b/x-pack/plugins/index_management/public/application/index.tsx
@@ -25,7 +25,7 @@ export const renderApp = (
return () => undefined;
}
- const { i18n, docLinks, notifications } = core;
+ const { i18n, docLinks, notifications, application } = core;
const { Context: I18nContext } = i18n;
const { services, history, setBreadcrumbs } = dependencies;
@@ -36,6 +36,7 @@ export const renderApp = (
docLinks,
toasts: notifications.toasts,
setBreadcrumbs,
+ getUrlForApp: application.getUrlForApp,
};
render(
diff --git a/x-pack/plugins/index_management/server/routes/api/component_templates/get.ts b/x-pack/plugins/index_management/server/routes/api/component_templates/get.ts
index f6f8e7d63d370..16b028887f63c 100644
--- a/x-pack/plugins/index_management/server/routes/api/component_templates/get.ts
+++ b/x-pack/plugins/index_management/server/routes/api/component_templates/get.ts
@@ -7,7 +7,7 @@ import { schema } from '@kbn/config-schema';
import {
deserializeComponentTemplate,
- deserializeComponenTemplateList,
+ deserializeComponentTemplateList,
} from '../../../../common/lib';
import { ComponentTemplateFromEs } from '../../../../common';
import { RouteDependencies } from '../../../types';
@@ -36,7 +36,7 @@ export function registerGetAllRoute({ router, license, lib: { isEsError } }: Rou
);
const body = componentTemplates.map((componentTemplate) => {
- const deserializedComponentTemplateListItem = deserializeComponenTemplateList(
+ const deserializedComponentTemplateListItem = deserializeComponentTemplateList(
componentTemplate,
indexTemplates
);
diff --git a/x-pack/plugins/index_management/server/routes/api/component_templates/schema_validation.ts b/x-pack/plugins/index_management/server/routes/api/component_templates/schema_validation.ts
index a1fc258127229..cfcb428f00501 100644
--- a/x-pack/plugins/index_management/server/routes/api/component_templates/schema_validation.ts
+++ b/x-pack/plugins/index_management/server/routes/api/component_templates/schema_validation.ts
@@ -16,5 +16,6 @@ export const componentTemplateSchema = schema.object({
_meta: schema.maybe(schema.object({}, { unknowns: 'allow' })),
_kbnMeta: schema.object({
usedBy: schema.arrayOf(schema.string()),
+ isManaged: schema.boolean(),
}),
});
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts
index 30b6be435837b..cbd89db97236f 100644
--- a/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts
@@ -8,4 +8,5 @@ export * from './log_entry_categories';
export * from './log_entry_category_datasets';
export * from './log_entry_category_examples';
export * from './log_entry_rate';
-export * from './log_entry_rate_examples';
+export * from './log_entry_examples';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..639ac63f9b14d
--- /dev/null
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts
@@ -0,0 +1,137 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+
+import { timeRangeRT, routeTimingMetadataRT } from '../../shared';
+
+export const LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH =
+ '/api/infra/log_analysis/results/log_entry_anomalies';
+
+// [Sort field value, tiebreaker value]
+const paginationCursorRT = rt.tuple([
+ rt.union([rt.string, rt.number]),
+ rt.union([rt.string, rt.number]),
+]);
+
+export type PaginationCursor = rt.TypeOf;
+
+export const anomalyTypeRT = rt.keyof({
+ logRate: null,
+ logCategory: null,
+});
+
+export type AnomalyType = rt.TypeOf;
+
+const logEntryAnomalyCommonFieldsRT = rt.type({
+ id: rt.string,
+ anomalyScore: rt.number,
+ dataset: rt.string,
+ typical: rt.number,
+ actual: rt.number,
+ type: anomalyTypeRT,
+ duration: rt.number,
+ startTime: rt.number,
+ jobId: rt.string,
+});
+const logEntrylogRateAnomalyRT = logEntryAnomalyCommonFieldsRT;
+const logEntrylogCategoryAnomalyRT = rt.partial({
+ categoryId: rt.string,
+});
+const logEntryAnomalyRT = rt.intersection([
+ logEntryAnomalyCommonFieldsRT,
+ logEntrylogRateAnomalyRT,
+ logEntrylogCategoryAnomalyRT,
+]);
+
+export type LogEntryAnomaly = rt.TypeOf;
+
+export const getLogEntryAnomaliesSuccessReponsePayloadRT = rt.intersection([
+ rt.type({
+ data: rt.intersection([
+ rt.type({
+ anomalies: rt.array(logEntryAnomalyRT),
+ // Signifies there are more entries backwards or forwards. If this was a request
+ // for a previous page, there are more previous pages, if this was a request for a next page,
+ // there are more next pages.
+ hasMoreEntries: rt.boolean,
+ }),
+ rt.partial({
+ paginationCursors: rt.type({
+ // The cursor to use to fetch the previous page
+ previousPageCursor: paginationCursorRT,
+ // The cursor to use to fetch the next page
+ nextPageCursor: paginationCursorRT,
+ }),
+ }),
+ ]),
+ }),
+ rt.partial({
+ timing: routeTimingMetadataRT,
+ }),
+]);
+
+export type GetLogEntryAnomaliesSuccessResponsePayload = rt.TypeOf<
+ typeof getLogEntryAnomaliesSuccessReponsePayloadRT
+>;
+
+const sortOptionsRT = rt.keyof({
+ anomalyScore: null,
+ dataset: null,
+ startTime: null,
+});
+
+const sortDirectionsRT = rt.keyof({
+ asc: null,
+ desc: null,
+});
+
+const paginationPreviousPageCursorRT = rt.type({
+ searchBefore: paginationCursorRT,
+});
+
+const paginationNextPageCursorRT = rt.type({
+ searchAfter: paginationCursorRT,
+});
+
+const paginationRT = rt.intersection([
+ rt.type({
+ pageSize: rt.number,
+ }),
+ rt.partial({
+ cursor: rt.union([paginationPreviousPageCursorRT, paginationNextPageCursorRT]),
+ }),
+]);
+
+export type Pagination = rt.TypeOf;
+
+const sortRT = rt.type({
+ field: sortOptionsRT,
+ direction: sortDirectionsRT,
+});
+
+export type Sort = rt.TypeOf;
+
+export const getLogEntryAnomaliesRequestPayloadRT = rt.type({
+ data: rt.intersection([
+ rt.type({
+ // the ID of the source configuration
+ sourceId: rt.string,
+ // the time range to fetch the log entry anomalies from
+ timeRange: timeRangeRT,
+ }),
+ rt.partial({
+ // Pagination properties
+ pagination: paginationRT,
+ // Sort properties
+ sort: sortRT,
+ }),
+ ]),
+});
+
+export type GetLogEntryAnomaliesRequestPayload = rt.TypeOf<
+ typeof getLogEntryAnomaliesRequestPayloadRT
+>;
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_examples.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_examples.ts
new file mode 100644
index 0000000000000..1eed29cd37560
--- /dev/null
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_examples.ts
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+
+import {
+ badRequestErrorRT,
+ forbiddenErrorRT,
+ timeRangeRT,
+ routeTimingMetadataRT,
+} from '../../shared';
+
+export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH =
+ '/api/infra/log_analysis/results/log_entry_examples';
+
+/**
+ * request
+ */
+
+export const getLogEntryExamplesRequestPayloadRT = rt.type({
+ data: rt.intersection([
+ rt.type({
+ // the dataset to fetch the log rate examples from
+ dataset: rt.string,
+ // the number of examples to fetch
+ exampleCount: rt.number,
+ // the id of the source configuration
+ sourceId: rt.string,
+ // the time range to fetch the log rate examples from
+ timeRange: timeRangeRT,
+ }),
+ rt.partial({
+ categoryId: rt.string,
+ }),
+ ]),
+});
+
+export type GetLogEntryExamplesRequestPayload = rt.TypeOf<
+ typeof getLogEntryExamplesRequestPayloadRT
+>;
+
+/**
+ * response
+ */
+
+const logEntryExampleRT = rt.type({
+ id: rt.string,
+ dataset: rt.string,
+ message: rt.string,
+ timestamp: rt.number,
+ tiebreaker: rt.number,
+});
+
+export type LogEntryExample = rt.TypeOf;
+
+export const getLogEntryExamplesSuccessReponsePayloadRT = rt.intersection([
+ rt.type({
+ data: rt.type({
+ examples: rt.array(logEntryExampleRT),
+ }),
+ }),
+ rt.partial({
+ timing: routeTimingMetadataRT,
+ }),
+]);
+
+export type GetLogEntryExamplesSuccessReponsePayload = rt.TypeOf<
+ typeof getLogEntryExamplesSuccessReponsePayloadRT
+>;
+
+export const getLogEntryExamplesResponsePayloadRT = rt.union([
+ getLogEntryExamplesSuccessReponsePayloadRT,
+ badRequestErrorRT,
+ forbiddenErrorRT,
+]);
+
+export type GetLogEntryExamplesResponsePayload = rt.TypeOf<
+ typeof getLogEntryExamplesResponsePayloadRT
+>;
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate_examples.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate_examples.ts
deleted file mode 100644
index 700f87ec3beb1..0000000000000
--- a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate_examples.ts
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import * as rt from 'io-ts';
-
-import {
- badRequestErrorRT,
- forbiddenErrorRT,
- timeRangeRT,
- routeTimingMetadataRT,
-} from '../../shared';
-
-export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH =
- '/api/infra/log_analysis/results/log_entry_rate_examples';
-
-/**
- * request
- */
-
-export const getLogEntryRateExamplesRequestPayloadRT = rt.type({
- data: rt.type({
- // the dataset to fetch the log rate examples from
- dataset: rt.string,
- // the number of examples to fetch
- exampleCount: rt.number,
- // the id of the source configuration
- sourceId: rt.string,
- // the time range to fetch the log rate examples from
- timeRange: timeRangeRT,
- }),
-});
-
-export type GetLogEntryRateExamplesRequestPayload = rt.TypeOf<
- typeof getLogEntryRateExamplesRequestPayloadRT
->;
-
-/**
- * response
- */
-
-const logEntryRateExampleRT = rt.type({
- id: rt.string,
- dataset: rt.string,
- message: rt.string,
- timestamp: rt.number,
- tiebreaker: rt.number,
-});
-
-export type LogEntryRateExample = rt.TypeOf;
-
-export const getLogEntryRateExamplesSuccessReponsePayloadRT = rt.intersection([
- rt.type({
- data: rt.type({
- examples: rt.array(logEntryRateExampleRT),
- }),
- }),
- rt.partial({
- timing: routeTimingMetadataRT,
- }),
-]);
-
-export type GetLogEntryRateExamplesSuccessReponsePayload = rt.TypeOf<
- typeof getLogEntryRateExamplesSuccessReponsePayloadRT
->;
-
-export const getLogEntryRateExamplesResponsePayloadRT = rt.union([
- getLogEntryRateExamplesSuccessReponsePayloadRT,
- badRequestErrorRT,
- forbiddenErrorRT,
-]);
-
-export type GetLogEntryRateExamplesResponsePayload = rt.TypeOf<
- typeof getLogEntryRateExamplesResponsePayloadRT
->;
diff --git a/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts b/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts
index 19c92cb381104..f4497dbba5056 100644
--- a/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts
+++ b/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts
@@ -41,6 +41,10 @@ export const formatAnomalyScore = (score: number) => {
return Math.round(score);
};
+export const formatOneDecimalPlace = (number: number) => {
+ return Math.round(number * 10) / 10;
+};
+
export const getFriendlyNameForPartitionId = (partitionId: string) => {
return partitionId !== '' ? partitionId : 'unknown';
};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx
index bf4dbcd87cc41..21c3e3ec70029 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx
@@ -5,30 +5,18 @@
*/
import datemath from '@elastic/datemath';
-import {
- EuiBadge,
- EuiFlexGroup,
- EuiFlexItem,
- EuiPage,
- EuiPanel,
- EuiSuperDatePicker,
- EuiText,
-} from '@elastic/eui';
-import numeral from '@elastic/numeral';
-import { FormattedMessage } from '@kbn/i18n/react';
+import { EuiFlexGroup, EuiFlexItem, EuiPage, EuiPanel, EuiSuperDatePicker } from '@elastic/eui';
import moment from 'moment';
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { euiStyled, useTrackPageview } from '../../../../../observability/public';
import { TimeRange } from '../../../../common/http_api/shared/time_range';
import { bucketSpan } from '../../../../common/log_analysis';
-import { LoadingOverlayWrapper } from '../../../components/loading_overlay_wrapper';
import { LogAnalysisJobProblemIndicator } from '../../../components/logging/log_analysis_job_status';
import { useInterval } from '../../../hooks/use_interval';
-import { useKibanaUiSetting } from '../../../utils/use_kibana_ui_setting';
import { AnomaliesResults } from './sections/anomalies';
-import { LogRateResults } from './sections/log_rate';
import { useLogEntryRateModuleContext } from './use_log_entry_rate_module';
import { useLogEntryRateResults } from './use_log_entry_rate_results';
+import { useLogEntryAnomaliesResults } from './use_log_entry_anomalies_results';
import {
StringTimeRange,
useLogAnalysisResultsUrlState,
@@ -36,6 +24,15 @@ import {
const JOB_STATUS_POLLING_INTERVAL = 30000;
+export const SORT_DEFAULTS = {
+ direction: 'desc' as const,
+ field: 'anomalyScore' as const,
+};
+
+export const PAGINATION_DEFAULTS = {
+ pageSize: 25,
+};
+
interface LogEntryRateResultsContentProps {
onOpenSetup: () => void;
}
@@ -46,8 +43,6 @@ export const LogEntryRateResultsContent: React.FunctionComponent {
setQueryTimeRange({
@@ -182,45 +194,18 @@ export const LogEntryRateResultsContent: React.FunctionComponent
-
-
-
- {logEntryRate ? (
-
-
-
-
- {numeral(logEntryRate.totalNumberOfLogEntries).format('0.00a')}
-
-
- ),
- startTime: (
- {moment(queryTimeRange.value.startTime).format(dateFormat)}
- ),
- endTime: {moment(queryTimeRange.value.endTime).format(dateFormat)},
- }}
- />
-
-
- ) : null}
-
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx
index 79ab4475ee5a3..ae5c3b5b93b47 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-
+import { EuiEmptyPrompt } from '@elastic/eui';
import { RectAnnotationDatum, AnnotationId } from '@elastic/charts';
import {
Axis,
@@ -21,6 +21,7 @@ import numeral from '@elastic/numeral';
import { i18n } from '@kbn/i18n';
import moment from 'moment';
import React, { useCallback, useMemo } from 'react';
+import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import {
@@ -36,7 +37,16 @@ export const AnomaliesChart: React.FunctionComponent<{
series: Array<{ time: number; value: number }>;
annotations: Record;
renderAnnotationTooltip?: (details?: string) => JSX.Element;
-}> = ({ chartId, series, annotations, setTimeRange, timeRange, renderAnnotationTooltip }) => {
+ isLoading: boolean;
+}> = ({
+ chartId,
+ series,
+ annotations,
+ setTimeRange,
+ timeRange,
+ renderAnnotationTooltip,
+ isLoading,
+}) => {
const [dateFormat] = useKibanaUiSetting('dateFormat', 'Y-MM-DD HH:mm:ss.SSS');
const [isDarkMode] = useKibanaUiSetting('theme:darkMode');
@@ -68,41 +78,56 @@ export const AnomaliesChart: React.FunctionComponent<{
[setTimeRange]
);
- return (
-
-
-
- numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
- />
-
+ {i18n.translate('xpack.infra.logs.analysis.anomalySectionLogRateChartNoData', {
+ defaultMessage: 'There is no log rate data to display.',
})}
- xScaleType="time"
- yScaleType="linear"
- xAccessor={'time'}
- yAccessors={['value']}
- data={series}
- barSeriesStyle={barSeriesStyle}
- />
- {renderAnnotations(annotations, chartId, renderAnnotationTooltip)}
-
-
-
+
+ }
+ titleSize="m"
+ />
+ ) : (
+
+
+ {series.length ? (
+
+
+ numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
+ />
+
+ {renderAnnotations(annotations, chartId, renderAnnotationTooltip)}
+
+
+ ) : null}
+
+
);
};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx
index c527b8c49d099..e4b12e199a048 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx
@@ -10,12 +10,12 @@ import { i18n } from '@kbn/i18n';
import React from 'react';
import { useMount } from 'react-use';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { AnomalyRecord } from '../../use_log_entry_rate_results';
-import { useLogEntryRateModuleContext } from '../../use_log_entry_rate_module';
-import { useLogEntryRateExamples } from '../../use_log_entry_rate_examples';
+import { LogEntryAnomaly } from '../../../../../../common/http_api';
+import { useLogEntryExamples } from '../../use_log_entry_examples';
import { LogEntryExampleMessages } from '../../../../../components/logging/log_entry_examples/log_entry_examples';
-import { LogEntryRateExampleMessage, LogEntryRateExampleMessageHeaders } from './log_entry_example';
+import { LogEntryExampleMessage, LogEntryExampleMessageHeaders } from './log_entry_example';
import { euiStyled } from '../../../../../../../observability/public';
+import { useLogSourceContext } from '../../../../../containers/logs/log_source';
const EXAMPLE_COUNT = 5;
@@ -24,29 +24,27 @@ const examplesTitle = i18n.translate('xpack.infra.logs.analysis.anomaliesTableEx
});
export const AnomaliesTableExpandedRow: React.FunctionComponent<{
- anomaly: AnomalyRecord;
+ anomaly: LogEntryAnomaly;
timeRange: TimeRange;
- jobId: string;
-}> = ({ anomaly, timeRange, jobId }) => {
- const {
- sourceConfiguration: { sourceId },
- } = useLogEntryRateModuleContext();
+}> = ({ anomaly, timeRange }) => {
+ const { sourceId } = useLogSourceContext();
const {
- getLogEntryRateExamples,
- hasFailedLoadingLogEntryRateExamples,
- isLoadingLogEntryRateExamples,
- logEntryRateExamples,
- } = useLogEntryRateExamples({
- dataset: anomaly.partitionId,
+ getLogEntryExamples,
+ hasFailedLoadingLogEntryExamples,
+ isLoadingLogEntryExamples,
+ logEntryExamples,
+ } = useLogEntryExamples({
+ dataset: anomaly.dataset,
endTime: anomaly.startTime + anomaly.duration,
exampleCount: EXAMPLE_COUNT,
sourceId,
startTime: anomaly.startTime,
+ categoryId: anomaly.categoryId,
});
useMount(() => {
- getLogEntryRateExamples();
+ getLogEntryExamples();
});
return (
@@ -57,17 +55,17 @@ export const AnomaliesTableExpandedRow: React.FunctionComponent<{
{examplesTitle}
0}
+ isLoading={isLoadingLogEntryExamples}
+ hasFailedLoading={hasFailedLoadingLogEntryExamples}
+ hasResults={logEntryExamples.length > 0}
exampleCount={EXAMPLE_COUNT}
- onReload={getLogEntryRateExamples}
+ onReload={getLogEntryExamples}
>
- {logEntryRateExamples.length > 0 ? (
+ {logEntryExamples.length > 0 ? (
<>
-
- {logEntryRateExamples.map((example, exampleIndex) => (
-
+ {logEntryExamples.map((example, exampleIndex) => (
+
))}
>
@@ -87,11 +85,11 @@ export const AnomaliesTableExpandedRow: React.FunctionComponent<{
void;
timeRange: TimeRange;
viewSetupForReconfiguration: () => void;
- jobId: string;
-}> = ({ isLoading, results, setTimeRange, timeRange, viewSetupForReconfiguration, jobId }) => {
- const hasAnomalies = useMemo(() => {
- return results && results.histogramBuckets
- ? results.histogramBuckets.some((bucket) => {
- return bucket.partitions.some((partition) => {
- return partition.anomalies.length > 0;
- });
- })
- : false;
- }, [results]);
-
+ page: Page;
+ fetchNextPage?: FetchNextPage;
+ fetchPreviousPage?: FetchPreviousPage;
+ changeSortOptions: ChangeSortOptions;
+ changePaginationOptions: ChangePaginationOptions;
+ sortOptions: SortOptions;
+ paginationOptions: PaginationOptions;
+}> = ({
+ isLoadingLogRateResults,
+ isLoadingAnomaliesResults,
+ logEntryRateResults,
+ setTimeRange,
+ timeRange,
+ viewSetupForReconfiguration,
+ anomalies,
+ changeSortOptions,
+ sortOptions,
+ changePaginationOptions,
+ paginationOptions,
+ fetchNextPage,
+ fetchPreviousPage,
+ page,
+}) => {
const logEntryRateSeries = useMemo(
- () => (results && results.histogramBuckets ? getLogEntryRateCombinedSeries(results) : []),
- [results]
+ () =>
+ logEntryRateResults && logEntryRateResults.histogramBuckets
+ ? getLogEntryRateCombinedSeries(logEntryRateResults)
+ : [],
+ [logEntryRateResults]
);
const anomalyAnnotations = useMemo(
() =>
- results && results.histogramBuckets
- ? getAnnotationsForAll(results)
+ logEntryRateResults && logEntryRateResults.histogramBuckets
+ ? getAnnotationsForAll(logEntryRateResults)
: {
warning: [],
minor: [],
major: [],
critical: [],
},
- [results]
+ [logEntryRateResults]
);
return (
<>
-
- {title}
+
+ {title}
-
-
-
- }>
- {!results || (results && results.histogramBuckets && !results.histogramBuckets.length) ? (
+ {(!logEntryRateResults ||
+ (logEntryRateResults &&
+ logEntryRateResults.histogramBuckets &&
+ !logEntryRateResults.histogramBuckets.length)) &&
+ (!anomalies || anomalies.length === 0) ? (
+ }
+ >
@@ -94,41 +123,38 @@ export const AnomaliesResults: React.FunctionComponent<{
}
/>
- ) : !hasAnomalies ? (
-
- {i18n.translate('xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle', {
- defaultMessage: 'No anomalies were detected.',
- })}
-
- }
- titleSize="m"
+
+ ) : (
+ <>
+
+
+
+
+
+
+
- ) : (
- <>
-
-
-
-
-
-
-
- >
- )}
-
+ >
+ )}
>
);
};
@@ -137,13 +163,6 @@ const title = i18n.translate('xpack.infra.logs.analysis.anomaliesSectionTitle',
defaultMessage: 'Anomalies',
});
-const loadingAriaLabel = i18n.translate(
- 'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
- { defaultMessage: 'Loading anomalies' }
-);
-
-const LoadingOverlayContent = () => ;
-
interface ParsedAnnotationDetails {
anomalyScoresByPartition: Array<{ partitionName: string; maximumAnomalyScore: number }>;
}
@@ -189,3 +208,10 @@ const renderAnnotationTooltip = (details?: string) => {
const TooltipWrapper = euiStyled('div')`
white-space: nowrap;
`;
+
+const loadingAriaLabel = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
+ { defaultMessage: 'Loading anomalies' }
+);
+
+const LoadingOverlayContent = () => ;
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx
index 96f665b3693ca..2965e1fede822 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx
@@ -28,7 +28,7 @@ import { useLinkProps } from '../../../../../hooks/use_link_props';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import { partitionField } from '../../../../../../common/log_analysis/job_parameters';
import { getEntitySpecificSingleMetricViewerLink } from '../../../../../components/logging/log_analysis_results/analyze_in_ml_button';
-import { LogEntryRateExample } from '../../../../../../common/http_api/log_analysis/results';
+import { LogEntryExample } from '../../../../../../common/http_api/log_analysis/results';
import {
LogColumnConfiguration,
isTimestampLogColumnConfiguration,
@@ -36,6 +36,7 @@ import {
isMessageLogColumnConfiguration,
} from '../../../../../utils/source_configuration';
import { localizedDate } from '../../../../../../common/formatters/datetime';
+import { LogEntryAnomaly } from '../../../../../../common/http_api';
export const exampleMessageScale = 'medium' as const;
export const exampleTimestampFormat = 'time' as const;
@@ -58,19 +59,19 @@ const VIEW_ANOMALY_IN_ML_LABEL = i18n.translate(
}
);
-type Props = LogEntryRateExample & {
+type Props = LogEntryExample & {
timeRange: TimeRange;
- jobId: string;
+ anomaly: LogEntryAnomaly;
};
-export const LogEntryRateExampleMessage: React.FunctionComponent = ({
+export const LogEntryExampleMessage: React.FunctionComponent = ({
id,
dataset,
message,
timestamp,
tiebreaker,
timeRange,
- jobId,
+ anomaly,
}) => {
const [isHovered, setIsHovered] = useState(false);
const [isMenuOpen, setIsMenuOpen] = useState(false);
@@ -107,8 +108,9 @@ export const LogEntryRateExampleMessage: React.FunctionComponent = ({
});
const viewAnomalyInMachineLearningLinkProps = useLinkProps(
- getEntitySpecificSingleMetricViewerLink(jobId, timeRange, {
+ getEntitySpecificSingleMetricViewerLink(anomaly.jobId, timeRange, {
[partitionField]: dataset,
+ ...(anomaly.categoryId ? { mlcategory: anomaly.categoryId } : {}),
})
);
@@ -233,11 +235,11 @@ export const exampleMessageColumnConfigurations: LogColumnConfiguration[] = [
},
];
-export const LogEntryRateExampleMessageHeaders: React.FunctionComponent<{
+export const LogEntryExampleMessageHeaders: React.FunctionComponent<{
dateTime: number;
}> = ({ dateTime }) => {
return (
-
+
<>
{exampleMessageColumnConfigurations.map((columnConfiguration) => {
if (isTimestampLogColumnConfiguration(columnConfiguration)) {
@@ -280,11 +282,11 @@ export const LogEntryRateExampleMessageHeaders: React.FunctionComponent<{
{null}
>
-
+
);
};
-const LogEntryRateExampleMessageHeadersWrapper = euiStyled(LogColumnHeadersWrapper)`
+const LogEntryExampleMessageHeadersWrapper = euiStyled(LogColumnHeadersWrapper)`
border-bottom: none;
box-shadow: none;
padding-right: 0;
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx
index c70a456bfe06a..e0a3b6fb91db0 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx
@@ -4,45 +4,52 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { EuiBasicTable, EuiBasicTableColumn } from '@elastic/eui';
+import {
+ EuiBasicTable,
+ EuiBasicTableColumn,
+ EuiIcon,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiButtonIcon,
+ EuiSpacer,
+} from '@elastic/eui';
import { RIGHT_ALIGNMENT } from '@elastic/eui/lib/services';
import moment from 'moment';
import { i18n } from '@kbn/i18n';
-import React, { useCallback, useMemo, useState } from 'react';
+import React, { useCallback, useMemo } from 'react';
import { useSet } from 'react-use';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import {
formatAnomalyScore,
getFriendlyNameForPartitionId,
+ formatOneDecimalPlace,
} from '../../../../../../common/log_analysis';
+import { AnomalyType } from '../../../../../../common/http_api/log_analysis';
import { RowExpansionButton } from '../../../../../components/basic_table';
-import { LogEntryRateResults } from '../../use_log_entry_rate_results';
import { AnomaliesTableExpandedRow } from './expanded_row';
import { AnomalySeverityIndicator } from '../../../../../components/logging/log_analysis_results/anomaly_severity_indicator';
import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
+import {
+ Page,
+ FetchNextPage,
+ FetchPreviousPage,
+ ChangeSortOptions,
+ ChangePaginationOptions,
+ SortOptions,
+ PaginationOptions,
+ LogEntryAnomalies,
+} from '../../use_log_entry_anomalies_results';
+import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
interface TableItem {
id: string;
dataset: string;
datasetName: string;
anomalyScore: number;
- anomalyMessage: string;
startTime: number;
-}
-
-interface SortingOptions {
- sort: {
- field: keyof TableItem;
- direction: 'asc' | 'desc';
- };
-}
-
-interface PaginationOptions {
- pageIndex: number;
- pageSize: number;
- totalItemCount: number;
- pageSizeOptions: number[];
- hidePerPageOptions: boolean;
+ typical: number;
+ actual: number;
+ type: AnomalyType;
}
const anomalyScoreColumnName = i18n.translate(
@@ -73,125 +80,78 @@ const datasetColumnName = i18n.translate(
}
);
-const moreThanExpectedAnomalyMessage = i18n.translate(
- 'xpack.infra.logs.analysis.anomaliesTableMoreThanExpectedAnomalyMessage',
- {
- defaultMessage: 'More log messages in this dataset than expected',
- }
-);
-
-const fewerThanExpectedAnomalyMessage = i18n.translate(
- 'xpack.infra.logs.analysis.anomaliesTableFewerThanExpectedAnomalyMessage',
- {
- defaultMessage: 'Fewer log messages in this dataset than expected',
- }
-);
-
-const getAnomalyMessage = (actualRate: number, typicalRate: number): string => {
- return actualRate < typicalRate
- ? fewerThanExpectedAnomalyMessage
- : moreThanExpectedAnomalyMessage;
-};
-
export const AnomaliesTable: React.FunctionComponent<{
- results: LogEntryRateResults;
+ results: LogEntryAnomalies;
setTimeRange: (timeRange: TimeRange) => void;
timeRange: TimeRange;
- jobId: string;
-}> = ({ results, timeRange, setTimeRange, jobId }) => {
+ changeSortOptions: ChangeSortOptions;
+ changePaginationOptions: ChangePaginationOptions;
+ sortOptions: SortOptions;
+ paginationOptions: PaginationOptions;
+ page: Page;
+ fetchNextPage?: FetchNextPage;
+ fetchPreviousPage?: FetchPreviousPage;
+ isLoading: boolean;
+}> = ({
+ results,
+ timeRange,
+ setTimeRange,
+ changeSortOptions,
+ sortOptions,
+ changePaginationOptions,
+ paginationOptions,
+ fetchNextPage,
+ fetchPreviousPage,
+ page,
+ isLoading,
+}) => {
const [dateFormat] = useKibanaUiSetting('dateFormat', 'Y-MM-DD HH:mm:ss');
+ const tableSortOptions = useMemo(() => {
+ return {
+ sort: sortOptions,
+ };
+ }, [sortOptions]);
+
const tableItems: TableItem[] = useMemo(() => {
- return results.anomalies.map((anomaly) => {
+ return results.map((anomaly) => {
return {
id: anomaly.id,
- dataset: anomaly.partitionId,
- datasetName: getFriendlyNameForPartitionId(anomaly.partitionId),
+ dataset: anomaly.dataset,
+ datasetName: getFriendlyNameForPartitionId(anomaly.dataset),
anomalyScore: formatAnomalyScore(anomaly.anomalyScore),
- anomalyMessage: getAnomalyMessage(anomaly.actualLogEntryRate, anomaly.typicalLogEntryRate),
startTime: anomaly.startTime,
+ type: anomaly.type,
+ typical: anomaly.typical,
+ actual: anomaly.actual,
};
});
}, [results]);
const [expandedIds, { add: expandId, remove: collapseId }] = useSet(new Set());
- const expandedDatasetRowContents = useMemo(
+ const expandedIdsRowContents = useMemo(
() =>
- [...expandedIds].reduce>((aggregatedDatasetRows, id) => {
- const anomaly = results.anomalies.find((_anomaly) => _anomaly.id === id);
+ [...expandedIds].reduce>((aggregatedRows, id) => {
+ const anomaly = results.find((_anomaly) => _anomaly.id === id);
return {
- ...aggregatedDatasetRows,
+ ...aggregatedRows,
[id]: anomaly ? (
-
+
) : null,
};
}, {}),
- [expandedIds, results, timeRange, jobId]
+ [expandedIds, results, timeRange]
);
- const [sorting, setSorting] = useState({
- sort: {
- field: 'anomalyScore',
- direction: 'desc',
- },
- });
-
- const [_pagination, setPagination] = useState({
- pageIndex: 0,
- pageSize: 20,
- totalItemCount: results.anomalies.length,
- pageSizeOptions: [10, 20, 50],
- hidePerPageOptions: false,
- });
-
- const paginationOptions = useMemo(() => {
- return {
- ..._pagination,
- totalItemCount: results.anomalies.length,
- };
- }, [_pagination, results]);
-
const handleTableChange = useCallback(
- ({ page = {}, sort = {} }) => {
- const { index, size } = page;
- setPagination((currentPagination) => {
- return {
- ...currentPagination,
- pageIndex: index,
- pageSize: size,
- };
- });
- const { field, direction } = sort;
- setSorting({
- sort: {
- field,
- direction,
- },
- });
+ ({ sort = {} }) => {
+ changeSortOptions(sort);
},
- [setSorting, setPagination]
+ [changeSortOptions]
);
- const sortedTableItems = useMemo(() => {
- let sortedItems: TableItem[] = [];
- if (sorting.sort.field === 'datasetName') {
- sortedItems = tableItems.sort((a, b) => (a.datasetName > b.datasetName ? 1 : -1));
- } else if (sorting.sort.field === 'anomalyScore') {
- sortedItems = tableItems.sort((a, b) => a.anomalyScore - b.anomalyScore);
- } else if (sorting.sort.field === 'startTime') {
- sortedItems = tableItems.sort((a, b) => a.startTime - b.startTime);
- }
-
- return sorting.sort.direction === 'asc' ? sortedItems : sortedItems.reverse();
- }, [tableItems, sorting]);
-
- const pageOfItems: TableItem[] = useMemo(() => {
- const { pageIndex, pageSize } = paginationOptions;
- return sortedTableItems.slice(pageIndex * pageSize, pageIndex * pageSize + pageSize);
- }, [paginationOptions, sortedTableItems]);
-
const columns: Array> = useMemo(
() => [
{
@@ -204,10 +164,11 @@ export const AnomaliesTable: React.FunctionComponent<{
render: (anomalyScore: number) => ,
},
{
- field: 'anomalyMessage',
name: anomalyMessageColumnName,
- sortable: false,
truncateText: true,
+ render: (item: TableItem) => (
+
+ ),
},
{
field: 'startTime',
@@ -240,18 +201,116 @@ export const AnomaliesTable: React.FunctionComponent<{
],
[collapseId, expandId, expandedIds, dateFormat]
);
+ return (
+ <>
+
+
+
+
+
+ >
+ );
+};
+
+const AnomalyMessage = ({
+ actual,
+ typical,
+ type,
+}: {
+ actual: number;
+ typical: number;
+ type: AnomalyType;
+}) => {
+ const moreThanExpectedAnomalyMessage = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesTableMoreThanExpectedAnomalyMessage',
+ {
+ defaultMessage:
+ 'more log messages in this {type, select, logRate {dataset} logCategory {category}} than expected',
+ values: { type },
+ }
+ );
+
+ const fewerThanExpectedAnomalyMessage = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesTableFewerThanExpectedAnomalyMessage',
+ {
+ defaultMessage:
+ 'fewer log messages in this {type, select, logRate {dataset} logCategory {category}} than expected',
+ values: { type },
+ }
+ );
+
+ const isMore = actual > typical;
+ const message = isMore ? moreThanExpectedAnomalyMessage : fewerThanExpectedAnomalyMessage;
+ const ratio = isMore ? actual / typical : typical / actual;
+ const icon = isMore ? 'sortUp' : 'sortDown';
+ // Edge case scenarios where actual and typical might sit at 0.
+ const useRatio = ratio !== Infinity;
+ const ratioMessage = useRatio ? `${formatOneDecimalPlace(ratio)}x` : '';
return (
-
+
+ {`${ratioMessage} ${message}`}
+
+ );
+};
+
+const previousPageLabel = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesTablePreviousPageLabel',
+ {
+ defaultMessage: 'Previous page',
+ }
+);
+
+const nextPageLabel = i18n.translate('xpack.infra.logs.analysis.anomaliesTableNextPageLabel', {
+ defaultMessage: 'Next page',
+});
+
+const PaginationControls = ({
+ fetchPreviousPage,
+ fetchNextPage,
+ page,
+ isLoading,
+}: {
+ fetchPreviousPage?: () => void;
+ fetchNextPage?: () => void;
+ page: number;
+ isLoading: boolean;
+}) => {
+ return (
+
+
+
+
+
+ {page}
+
+
+
+
+
);
};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/bar_chart.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/bar_chart.tsx
deleted file mode 100644
index 498a9f88176f8..0000000000000
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/bar_chart.tsx
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import {
- Axis,
- BarSeries,
- Chart,
- niceTimeFormatter,
- Settings,
- TooltipValue,
- BrushEndListener,
- LIGHT_THEME,
- DARK_THEME,
-} from '@elastic/charts';
-import { i18n } from '@kbn/i18n';
-import numeral from '@elastic/numeral';
-import moment from 'moment';
-import React, { useCallback, useMemo } from 'react';
-
-import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
-
-export const LogEntryRateBarChart: React.FunctionComponent<{
- setTimeRange: (timeRange: TimeRange) => void;
- timeRange: TimeRange;
- series: Array<{ group: string; time: number; value: number }>;
-}> = ({ series, setTimeRange, timeRange }) => {
- const [dateFormat] = useKibanaUiSetting('dateFormat');
- const [isDarkMode] = useKibanaUiSetting('theme:darkMode');
-
- const chartDateFormatter = useMemo(
- () => niceTimeFormatter([timeRange.startTime, timeRange.endTime]),
- [timeRange]
- );
-
- const tooltipProps = useMemo(
- () => ({
- headerFormatter: (tooltipData: TooltipValue) =>
- moment(tooltipData.value).format(dateFormat || 'Y-MM-DD HH:mm:ss.SSS'),
- }),
- [dateFormat]
- );
-
- const handleBrushEnd = useCallback(
- ({ x }) => {
- if (!x) {
- return;
- }
- const [startTime, endTime] = x;
- setTimeRange({
- endTime,
- startTime,
- });
- },
- [setTimeRange]
- );
-
- return (
-
-
-
- numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
- />
-
-
-
-
- );
-};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/index.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/index.tsx
deleted file mode 100644
index 3da025d90119f..0000000000000
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/index.tsx
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { EuiEmptyPrompt, EuiLoadingSpinner, EuiSpacer, EuiText, EuiTitle } from '@elastic/eui';
-import { i18n } from '@kbn/i18n';
-import React, { useMemo } from 'react';
-
-import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { BetaBadge } from '../../../../../components/beta_badge';
-import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
-import { LogEntryRateResults as Results } from '../../use_log_entry_rate_results';
-import { getLogEntryRatePartitionedSeries } from '../helpers/data_formatters';
-import { LogEntryRateBarChart } from './bar_chart';
-
-export const LogRateResults = ({
- isLoading,
- results,
- setTimeRange,
- timeRange,
-}: {
- isLoading: boolean;
- results: Results | null;
- setTimeRange: (timeRange: TimeRange) => void;
- timeRange: TimeRange;
-}) => {
- const logEntryRateSeries = useMemo(
- () => (results && results.histogramBuckets ? getLogEntryRatePartitionedSeries(results) : []),
- [results]
- );
-
- return (
- <>
-
-
- {title}
-
-
- }>
- {!results || (results && results.histogramBuckets && !results.histogramBuckets.length) ? (
- <>
-
-
- {i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataTitle', {
- defaultMessage: 'There is no data to display.',
- })}
-
- }
- titleSize="m"
- body={
-
- {i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataBody', {
- defaultMessage: 'You may want to adjust your time range.',
- })}
-
- }
- />
- >
- ) : (
- <>
-
-
-
- {i18n.translate('xpack.infra.logs.analysis.logRateSectionBucketSpanLabel', {
- defaultMessage: 'Bucket span: ',
- })}
-
- {i18n.translate('xpack.infra.logs.analysis.logRateSectionBucketSpanValue', {
- defaultMessage: '15 minutes',
- })}
-
-
-
- >
- )}
-
- >
- );
-};
-
-const title = i18n.translate('xpack.infra.logs.analysis.logRateSectionTitle', {
- defaultMessage: 'Log entries',
-});
-
-const loadingAriaLabel = i18n.translate(
- 'xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel',
- { defaultMessage: 'Loading log rate results' }
-);
-
-const LoadingOverlayContent = () => ;
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts
new file mode 100644
index 0000000000000..d4a0eaae43ac0
--- /dev/null
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { npStart } from '../../../../legacy_singletons';
+import {
+ getLogEntryAnomaliesRequestPayloadRT,
+ getLogEntryAnomaliesSuccessReponsePayloadRT,
+ LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH,
+} from '../../../../../common/http_api/log_analysis';
+import { decodeOrThrow } from '../../../../../common/runtime_types';
+import { Sort, Pagination } from '../../../../../common/http_api/log_analysis';
+
+export const callGetLogEntryAnomaliesAPI = async (
+ sourceId: string,
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) => {
+ const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
+ method: 'POST',
+ body: JSON.stringify(
+ getLogEntryAnomaliesRequestPayloadRT.encode({
+ data: {
+ sourceId,
+ timeRange: {
+ startTime,
+ endTime,
+ },
+ sort,
+ pagination,
+ },
+ })
+ ),
+ });
+
+ return decodeOrThrow(getLogEntryAnomaliesSuccessReponsePayloadRT)(response);
+};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts
similarity index 77%
rename from x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate_examples.ts
rename to x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts
index d3b30da72af96..a125b53f9e635 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate_examples.ts
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts
@@ -10,23 +10,24 @@ import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import {
- getLogEntryRateExamplesRequestPayloadRT,
- getLogEntryRateExamplesSuccessReponsePayloadRT,
+ getLogEntryExamplesRequestPayloadRT,
+ getLogEntryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
-export const callGetLogEntryRateExamplesAPI = async (
+export const callGetLogEntryExamplesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
dataset: string,
- exampleCount: number
+ exampleCount: number,
+ categoryId?: string
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
method: 'POST',
body: JSON.stringify(
- getLogEntryRateExamplesRequestPayloadRT.encode({
+ getLogEntryExamplesRequestPayloadRT.encode({
data: {
dataset,
exampleCount,
@@ -35,13 +36,14 @@ export const callGetLogEntryRateExamplesAPI = async (
startTime,
endTime,
},
+ categoryId,
},
})
),
});
return pipe(
- getLogEntryRateExamplesSuccessReponsePayloadRT.decode(response),
+ getLogEntryExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts
new file mode 100644
index 0000000000000..cadb4c420c133
--- /dev/null
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts
@@ -0,0 +1,262 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
+
+import { LogEntryAnomaly } from '../../../../common/http_api';
+import { useTrackedPromise } from '../../../utils/use_tracked_promise';
+import { callGetLogEntryAnomaliesAPI } from './service_calls/get_log_entry_anomalies';
+import { Sort, Pagination, PaginationCursor } from '../../../../common/http_api/log_analysis';
+
+export type SortOptions = Sort;
+export type PaginationOptions = Pick;
+export type Page = number;
+export type FetchNextPage = () => void;
+export type FetchPreviousPage = () => void;
+export type ChangeSortOptions = (sortOptions: Sort) => void;
+export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void;
+export type LogEntryAnomalies = LogEntryAnomaly[];
+interface PaginationCursors {
+ previousPageCursor: PaginationCursor;
+ nextPageCursor: PaginationCursor;
+}
+
+interface ReducerState {
+ page: number;
+ lastReceivedCursors: PaginationCursors | undefined;
+ paginationCursor: Pagination['cursor'] | undefined;
+ hasNextPage: boolean;
+ paginationOptions: PaginationOptions;
+ sortOptions: Sort;
+ timeRange: {
+ start: number;
+ end: number;
+ };
+}
+
+type ReducerStateDefaults = Pick<
+ ReducerState,
+ 'page' | 'lastReceivedCursors' | 'paginationCursor' | 'hasNextPage'
+>;
+
+type ReducerAction =
+ | { type: 'changePaginationOptions'; payload: { paginationOptions: PaginationOptions } }
+ | { type: 'changeSortOptions'; payload: { sortOptions: Sort } }
+ | { type: 'fetchNextPage' }
+ | { type: 'fetchPreviousPage' }
+ | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } }
+ | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } }
+ | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } };
+
+const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => {
+ const resetPagination = {
+ page: 1,
+ paginationCursor: undefined,
+ };
+ switch (action.type) {
+ case 'changePaginationOptions':
+ return {
+ ...state,
+ ...resetPagination,
+ ...action.payload,
+ };
+ case 'changeSortOptions':
+ return {
+ ...state,
+ ...resetPagination,
+ ...action.payload,
+ };
+ case 'changeHasNextPage':
+ return {
+ ...state,
+ ...action.payload,
+ };
+ case 'changeLastReceivedCursors':
+ return {
+ ...state,
+ ...action.payload,
+ };
+ case 'fetchNextPage':
+ return state.lastReceivedCursors
+ ? {
+ ...state,
+ page: state.page + 1,
+ paginationCursor: { searchAfter: state.lastReceivedCursors.nextPageCursor },
+ }
+ : state;
+ case 'fetchPreviousPage':
+ return state.lastReceivedCursors
+ ? {
+ ...state,
+ page: state.page - 1,
+ paginationCursor: { searchBefore: state.lastReceivedCursors.previousPageCursor },
+ }
+ : state;
+ case 'changeTimeRange':
+ return {
+ ...state,
+ ...resetPagination,
+ ...action.payload,
+ };
+ default:
+ return state;
+ }
+};
+
+const STATE_DEFAULTS: ReducerStateDefaults = {
+ // NOTE: This piece of state is purely for the client side, it could be extracted out of the hook.
+ page: 1,
+ // Cursor from the last request
+ lastReceivedCursors: undefined,
+ // Cursor to use for the next request. For the first request, and therefore not paging, this will be undefined.
+ paginationCursor: undefined,
+ hasNextPage: false,
+};
+
+export const useLogEntryAnomaliesResults = ({
+ endTime,
+ startTime,
+ sourceId,
+ defaultSortOptions,
+ defaultPaginationOptions,
+}: {
+ endTime: number;
+ startTime: number;
+ sourceId: string;
+ defaultSortOptions: Sort;
+ defaultPaginationOptions: Pick;
+}) => {
+ const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
+ return {
+ ...stateDefaults,
+ paginationOptions: defaultPaginationOptions,
+ sortOptions: defaultSortOptions,
+ timeRange: {
+ start: startTime,
+ end: endTime,
+ },
+ };
+ };
+
+ const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer);
+
+ const [logEntryAnomalies, setLogEntryAnomalies] = useState([]);
+
+ const [getLogEntryAnomaliesRequest, getLogEntryAnomalies] = useTrackedPromise(
+ {
+ cancelPreviousOn: 'creation',
+ createPromise: async () => {
+ const {
+ timeRange: { start: queryStartTime, end: queryEndTime },
+ sortOptions,
+ paginationOptions,
+ paginationCursor,
+ } = reducerState;
+ return await callGetLogEntryAnomaliesAPI(
+ sourceId,
+ queryStartTime,
+ queryEndTime,
+ sortOptions,
+ {
+ ...paginationOptions,
+ cursor: paginationCursor,
+ }
+ );
+ },
+ onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
+ const { paginationCursor } = reducerState;
+ if (requestCursors) {
+ dispatch({
+ type: 'changeLastReceivedCursors',
+ payload: { lastReceivedCursors: requestCursors },
+ });
+ }
+ // Check if we have more "next" entries. "Page" covers the "previous" scenario,
+ // since we need to know the page we're on anyway.
+ if (!paginationCursor || (paginationCursor && 'searchAfter' in paginationCursor)) {
+ dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: hasMoreEntries } });
+ } else if (paginationCursor && 'searchBefore' in paginationCursor) {
+ // We've requested a previous page, therefore there is a next page.
+ dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: true } });
+ }
+ setLogEntryAnomalies(anomalies);
+ },
+ },
+ [
+ sourceId,
+ dispatch,
+ reducerState.timeRange,
+ reducerState.sortOptions,
+ reducerState.paginationOptions,
+ reducerState.paginationCursor,
+ ]
+ );
+
+ const changeSortOptions = useCallback(
+ (nextSortOptions: Sort) => {
+ dispatch({ type: 'changeSortOptions', payload: { sortOptions: nextSortOptions } });
+ },
+ [dispatch]
+ );
+
+ const changePaginationOptions = useCallback(
+ (nextPaginationOptions: PaginationOptions) => {
+ dispatch({
+ type: 'changePaginationOptions',
+ payload: { paginationOptions: nextPaginationOptions },
+ });
+ },
+ [dispatch]
+ );
+
+ // Time range has changed
+ useEffect(() => {
+ dispatch({
+ type: 'changeTimeRange',
+ payload: { timeRange: { start: startTime, end: endTime } },
+ });
+ }, [startTime, endTime]);
+
+ useEffect(() => {
+ getLogEntryAnomalies();
+ }, [getLogEntryAnomalies]);
+
+ const handleFetchNextPage = useCallback(() => {
+ if (reducerState.lastReceivedCursors) {
+ dispatch({ type: 'fetchNextPage' });
+ }
+ }, [dispatch, reducerState]);
+
+ const handleFetchPreviousPage = useCallback(() => {
+ if (reducerState.lastReceivedCursors) {
+ dispatch({ type: 'fetchPreviousPage' });
+ }
+ }, [dispatch, reducerState]);
+
+ const isLoadingLogEntryAnomalies = useMemo(
+ () => getLogEntryAnomaliesRequest.state === 'pending',
+ [getLogEntryAnomaliesRequest.state]
+ );
+
+ const hasFailedLoadingLogEntryAnomalies = useMemo(
+ () => getLogEntryAnomaliesRequest.state === 'rejected',
+ [getLogEntryAnomaliesRequest.state]
+ );
+
+ return {
+ logEntryAnomalies,
+ getLogEntryAnomalies,
+ isLoadingLogEntryAnomalies,
+ hasFailedLoadingLogEntryAnomalies,
+ changeSortOptions,
+ sortOptions: reducerState.sortOptions,
+ changePaginationOptions,
+ paginationOptions: reducerState.paginationOptions,
+ fetchPreviousPage: reducerState.page > 1 ? handleFetchPreviousPage : undefined,
+ fetchNextPage: reducerState.hasNextPage ? handleFetchNextPage : undefined,
+ page: reducerState.page,
+ };
+};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts
new file mode 100644
index 0000000000000..fae5bd200a415
--- /dev/null
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts
@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { useMemo, useState } from 'react';
+
+import { LogEntryExample } from '../../../../common/http_api';
+import { useTrackedPromise } from '../../../utils/use_tracked_promise';
+import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples';
+
+export const useLogEntryExamples = ({
+ dataset,
+ endTime,
+ exampleCount,
+ sourceId,
+ startTime,
+ categoryId,
+}: {
+ dataset: string;
+ endTime: number;
+ exampleCount: number;
+ sourceId: string;
+ startTime: number;
+ categoryId?: string;
+}) => {
+ const [logEntryExamples, setLogEntryExamples] = useState([]);
+
+ const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise(
+ {
+ cancelPreviousOn: 'creation',
+ createPromise: async () => {
+ return await callGetLogEntryExamplesAPI(
+ sourceId,
+ startTime,
+ endTime,
+ dataset,
+ exampleCount,
+ categoryId
+ );
+ },
+ onResolve: ({ data: { examples } }) => {
+ setLogEntryExamples(examples);
+ },
+ },
+ [dataset, endTime, exampleCount, sourceId, startTime]
+ );
+
+ const isLoadingLogEntryExamples = useMemo(() => getLogEntryExamplesRequest.state === 'pending', [
+ getLogEntryExamplesRequest.state,
+ ]);
+
+ const hasFailedLoadingLogEntryExamples = useMemo(
+ () => getLogEntryExamplesRequest.state === 'rejected',
+ [getLogEntryExamplesRequest.state]
+ );
+
+ return {
+ getLogEntryExamples,
+ hasFailedLoadingLogEntryExamples,
+ isLoadingLogEntryExamples,
+ logEntryExamples,
+ };
+};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_examples.ts
deleted file mode 100644
index 12bcdb2a4b4d6..0000000000000
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_examples.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { useMemo, useState } from 'react';
-
-import { LogEntryRateExample } from '../../../../common/http_api';
-import { useTrackedPromise } from '../../../utils/use_tracked_promise';
-import { callGetLogEntryRateExamplesAPI } from './service_calls/get_log_entry_rate_examples';
-
-export const useLogEntryRateExamples = ({
- dataset,
- endTime,
- exampleCount,
- sourceId,
- startTime,
-}: {
- dataset: string;
- endTime: number;
- exampleCount: number;
- sourceId: string;
- startTime: number;
-}) => {
- const [logEntryRateExamples, setLogEntryRateExamples] = useState([]);
-
- const [getLogEntryRateExamplesRequest, getLogEntryRateExamples] = useTrackedPromise(
- {
- cancelPreviousOn: 'creation',
- createPromise: async () => {
- return await callGetLogEntryRateExamplesAPI(
- sourceId,
- startTime,
- endTime,
- dataset,
- exampleCount
- );
- },
- onResolve: ({ data: { examples } }) => {
- setLogEntryRateExamples(examples);
- },
- },
- [dataset, endTime, exampleCount, sourceId, startTime]
- );
-
- const isLoadingLogEntryRateExamples = useMemo(
- () => getLogEntryRateExamplesRequest.state === 'pending',
- [getLogEntryRateExamplesRequest.state]
- );
-
- const hasFailedLoadingLogEntryRateExamples = useMemo(
- () => getLogEntryRateExamplesRequest.state === 'rejected',
- [getLogEntryRateExamplesRequest.state]
- );
-
- return {
- getLogEntryRateExamples,
- hasFailedLoadingLogEntryRateExamples,
- isLoadingLogEntryRateExamples,
- logEntryRateExamples,
- };
-};
diff --git a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/dropdown_button.tsx b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/dropdown_button.tsx
index 6e3ebee2dcb4b..62b25d5a36870 100644
--- a/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/dropdown_button.tsx
+++ b/x-pack/plugins/infra/public/pages/metrics/inventory_view/components/dropdown_button.tsx
@@ -9,13 +9,15 @@ import React, { ReactNode } from 'react';
import { withTheme, EuiTheme } from '../../../../../../observability/public';
interface Props {
+ 'data-test-subj'?: string;
label: string;
onClick: () => void;
theme: EuiTheme | undefined;
children: ReactNode;
}
-export const DropdownButton = withTheme(({ onClick, label, theme, children }: Props) => {
+export const DropdownButton = withTheme((props: Props) => {
+ const { onClick, label, theme, children } = props;
return (
{
id: 'firstPanel',
items: [
{
+ 'data-test-subj': 'goToHost',
name: getDisplayNameForType('host'),
onClick: goToHost,
},
{
+ 'data-test-subj': 'goToPods',
name: getDisplayNameForType('pod'),
onClick: goToK8,
},
{
+ 'data-test-subj': 'goToDocker',
name: getDisplayNameForType('container'),
onClick: goToDocker,
},
@@ -117,6 +120,7 @@ export const WaffleInventorySwitcher: React.FC = () => {
const button = (
diff --git a/x-pack/plugins/infra/server/infra_server.ts b/x-pack/plugins/infra/server/infra_server.ts
index 8af37a36ef745..6596e07ebaca5 100644
--- a/x-pack/plugins/infra/server/infra_server.ts
+++ b/x-pack/plugins/infra/server/infra_server.ts
@@ -15,9 +15,10 @@ import {
initGetLogEntryCategoryDatasetsRoute,
initGetLogEntryCategoryExamplesRoute,
initGetLogEntryRateRoute,
- initGetLogEntryRateExamplesRoute,
+ initGetLogEntryExamplesRoute,
initValidateLogAnalysisDatasetsRoute,
initValidateLogAnalysisIndicesRoute,
+ initGetLogEntryAnomaliesRoute,
} from './routes/log_analysis';
import { initMetricExplorerRoute } from './routes/metrics_explorer';
import { initMetadataRoute } from './routes/metadata';
@@ -51,13 +52,14 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
initGetLogEntryCategoryDatasetsRoute(libs);
initGetLogEntryCategoryExamplesRoute(libs);
initGetLogEntryRateRoute(libs);
+ initGetLogEntryAnomaliesRoute(libs);
initSnapshotRoute(libs);
initNodeDetailsRoute(libs);
initSourceRoute(libs);
initValidateLogAnalysisDatasetsRoute(libs);
initValidateLogAnalysisIndicesRoute(libs);
initLogEntriesRoute(libs);
- initGetLogEntryRateExamplesRoute(libs);
+ initGetLogEntryExamplesRoute(libs);
initLogEntriesHighlightsRoute(libs);
initLogEntriesSummaryRoute(libs);
initLogEntriesSummaryHighlightsRoute(libs);
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/common.ts
new file mode 100644
index 0000000000000..0c0b0a0f19982
--- /dev/null
+++ b/x-pack/plugins/infra/server/lib/log_analysis/common.ts
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import type { MlAnomalyDetectors } from '../../types';
+import { startTracingSpan } from '../../../common/performance_tracing';
+import { NoLogAnalysisMlJobError } from './errors';
+
+export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) {
+ const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
+ const {
+ jobs: [mlJob],
+ } = await mlAnomalyDetectors.jobs(jobId);
+
+ const mlGetJobSpan = finalizeMlGetJobSpan();
+
+ if (mlJob == null) {
+ throw new NoLogAnalysisMlJobError(`Failed to find ml job ${jobId}.`);
+ }
+
+ return {
+ mlJob,
+ timing: {
+ spans: [mlGetJobSpan],
+ },
+ };
+}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/errors.ts b/x-pack/plugins/infra/server/lib/log_analysis/errors.ts
index e07126416f4ce..09fee8844fbc5 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/errors.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/errors.ts
@@ -33,3 +33,10 @@ export class UnknownCategoryError extends Error {
Object.setPrototypeOf(this, new.target.prototype);
}
}
+
+export class InsufficientAnomalyMlJobsConfigured extends Error {
+ constructor(message?: string) {
+ super(message);
+ Object.setPrototypeOf(this, new.target.prototype);
+ }
+}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/index.ts b/x-pack/plugins/infra/server/lib/log_analysis/index.ts
index 44c2bafce4194..c9a176be0a28f 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/index.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/index.ts
@@ -7,3 +7,4 @@
export * from './errors';
export * from './log_entry_categories_analysis';
export * from './log_entry_rate_analysis';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..12ae516564d66
--- /dev/null
+++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts
@@ -0,0 +1,398 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { RequestHandlerContext } from 'src/core/server';
+import { InfraRequestHandlerContext } from '../../types';
+import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing';
+import { fetchMlJob } from './common';
+import {
+ getJobId,
+ logEntryCategoriesJobTypes,
+ logEntryRateJobTypes,
+ jobCustomSettingsRT,
+} from '../../../common/log_analysis';
+import { Sort, Pagination } from '../../../common/http_api/log_analysis';
+import type { MlSystem } from '../../types';
+import { createLogEntryAnomaliesQuery, logEntryAnomaliesResponseRT } from './queries';
+import {
+ InsufficientAnomalyMlJobsConfigured,
+ InsufficientLogAnalysisMlJobConfigurationError,
+ UnknownCategoryError,
+} from './errors';
+import { decodeOrThrow } from '../../../common/runtime_types';
+import {
+ createLogEntryExamplesQuery,
+ logEntryExamplesResponseRT,
+} from './queries/log_entry_examples';
+import { InfraSource } from '../sources';
+import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
+import { fetchLogEntryCategories } from './log_entry_categories_analysis';
+
+interface MappedAnomalyHit {
+ id: string;
+ anomalyScore: number;
+ dataset: string;
+ typical: number;
+ actual: number;
+ jobId: string;
+ startTime: number;
+ duration: number;
+ categoryId?: string;
+}
+
+export async function getLogEntryAnomalies(
+ context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
+ sourceId: string,
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) {
+ const finalizeLogEntryAnomaliesSpan = startTracingSpan('get log entry anomalies');
+
+ const logRateJobId = getJobId(context.infra.spaceId, sourceId, logEntryRateJobTypes[0]);
+ const logCategoriesJobId = getJobId(
+ context.infra.spaceId,
+ sourceId,
+ logEntryCategoriesJobTypes[0]
+ );
+
+ const jobIds: string[] = [];
+ let jobSpans: TracingSpan[] = [];
+
+ try {
+ const {
+ timing: { spans },
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, logRateJobId);
+ jobIds.push(logRateJobId);
+ jobSpans = [...jobSpans, ...spans];
+ } catch (e) {
+ // Job wasn't found
+ }
+
+ try {
+ const {
+ timing: { spans },
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, logCategoriesJobId);
+ jobIds.push(logCategoriesJobId);
+ jobSpans = [...jobSpans, ...spans];
+ } catch (e) {
+ // Job wasn't found
+ }
+
+ if (jobIds.length === 0) {
+ throw new InsufficientAnomalyMlJobsConfigured(
+ 'Log rate or categorisation ML jobs need to be configured to search anomalies'
+ );
+ }
+
+ const {
+ anomalies,
+ paginationCursors,
+ hasMoreEntries,
+ timing: { spans: fetchLogEntryAnomaliesSpans },
+ } = await fetchLogEntryAnomalies(
+ context.infra.mlSystem,
+ jobIds,
+ startTime,
+ endTime,
+ sort,
+ pagination
+ );
+
+ const data = anomalies.map((anomaly) => {
+ const { jobId } = anomaly;
+
+ if (jobId === logRateJobId) {
+ return parseLogRateAnomalyResult(anomaly, logRateJobId);
+ } else {
+ return parseCategoryAnomalyResult(anomaly, logCategoriesJobId);
+ }
+ });
+
+ const logEntryAnomaliesSpan = finalizeLogEntryAnomaliesSpan();
+
+ return {
+ data,
+ paginationCursors,
+ hasMoreEntries,
+ timing: {
+ spans: [logEntryAnomaliesSpan, ...jobSpans, ...fetchLogEntryAnomaliesSpans],
+ },
+ };
+}
+
+const parseLogRateAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => {
+ const {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ } = anomaly;
+
+ return {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ type: 'logRate' as const,
+ jobId,
+ };
+};
+
+const parseCategoryAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => {
+ const {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ categoryId,
+ } = anomaly;
+
+ return {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ categoryId,
+ type: 'logCategory' as const,
+ jobId,
+ };
+};
+
+async function fetchLogEntryAnomalies(
+ mlSystem: MlSystem,
+ jobIds: string[],
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) {
+ // We'll request 1 extra entry on top of our pageSize to determine if there are
+ // more entries to be fetched. This avoids scenarios where the client side can't
+ // determine if entries.length === pageSize actually means there are more entries / next page
+ // or not.
+ const expandedPagination = { ...pagination, pageSize: pagination.pageSize + 1 };
+
+ const finalizeFetchLogEntryAnomaliesSpan = startTracingSpan('fetch log entry anomalies');
+
+ const results = decodeOrThrow(logEntryAnomaliesResponseRT)(
+ await mlSystem.mlAnomalySearch(
+ createLogEntryAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination)
+ )
+ );
+
+ const {
+ hits: { hits },
+ } = results;
+ const hasMoreEntries = hits.length > pagination.pageSize;
+
+ // An extra entry was found and hasMoreEntries has been determined, the extra entry can be removed.
+ if (hasMoreEntries) {
+ hits.pop();
+ }
+
+ // To "search_before" the sort order will have been reversed for ES.
+ // The results are now reversed back, to match the requested sort.
+ if (pagination.cursor && 'searchBefore' in pagination.cursor) {
+ hits.reverse();
+ }
+
+ const paginationCursors =
+ hits.length > 0
+ ? {
+ previousPageCursor: hits[0].sort,
+ nextPageCursor: hits[hits.length - 1].sort,
+ }
+ : undefined;
+
+ const anomalies = hits.map((result) => {
+ const {
+ job_id,
+ record_score: anomalyScore,
+ typical,
+ actual,
+ partition_field_value: dataset,
+ bucket_span: duration,
+ timestamp: anomalyStartTime,
+ by_field_value: categoryId,
+ } = result._source;
+
+ return {
+ id: result._id,
+ anomalyScore,
+ dataset,
+ typical: typical[0],
+ actual: actual[0],
+ jobId: job_id,
+ startTime: anomalyStartTime,
+ duration: duration * 1000,
+ categoryId,
+ };
+ });
+
+ const fetchLogEntryAnomaliesSpan = finalizeFetchLogEntryAnomaliesSpan();
+
+ return {
+ anomalies,
+ paginationCursors,
+ hasMoreEntries,
+ timing: {
+ spans: [fetchLogEntryAnomaliesSpan],
+ },
+ };
+}
+
+export async function getLogEntryExamples(
+ context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
+ sourceId: string,
+ startTime: number,
+ endTime: number,
+ dataset: string,
+ exampleCount: number,
+ sourceConfiguration: InfraSource,
+ callWithRequest: KibanaFramework['callWithRequest'],
+ categoryId?: string
+) {
+ const finalizeLogEntryExamplesSpan = startTracingSpan('get log entry rate example log entries');
+
+ const jobId = getJobId(
+ context.infra.spaceId,
+ sourceId,
+ categoryId != null ? logEntryCategoriesJobTypes[0] : logEntryRateJobTypes[0]
+ );
+
+ const {
+ mlJob,
+ timing: { spans: fetchMlJobSpans },
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, jobId);
+
+ const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
+ const indices = customSettings?.logs_source_config?.indexPattern;
+ const timestampField = customSettings?.logs_source_config?.timestampField;
+ const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
+
+ if (indices == null || timestampField == null) {
+ throw new InsufficientLogAnalysisMlJobConfigurationError(
+ `Failed to find index configuration for ml job ${jobId}`
+ );
+ }
+
+ const {
+ examples,
+ timing: { spans: fetchLogEntryExamplesSpans },
+ } = await fetchLogEntryExamples(
+ context,
+ sourceId,
+ indices,
+ timestampField,
+ tiebreakerField,
+ startTime,
+ endTime,
+ dataset,
+ exampleCount,
+ callWithRequest,
+ categoryId
+ );
+
+ const logEntryExamplesSpan = finalizeLogEntryExamplesSpan();
+
+ return {
+ data: examples,
+ timing: {
+ spans: [logEntryExamplesSpan, ...fetchMlJobSpans, ...fetchLogEntryExamplesSpans],
+ },
+ };
+}
+
+export async function fetchLogEntryExamples(
+ context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
+ sourceId: string,
+ indices: string,
+ timestampField: string,
+ tiebreakerField: string,
+ startTime: number,
+ endTime: number,
+ dataset: string,
+ exampleCount: number,
+ callWithRequest: KibanaFramework['callWithRequest'],
+ categoryId?: string
+) {
+ const finalizeEsSearchSpan = startTracingSpan('Fetch log rate examples from ES');
+
+ let categoryQuery: string | undefined;
+
+ // Examples should be further scoped to a specific ML category
+ if (categoryId) {
+ const parsedCategoryId = parseInt(categoryId, 10);
+
+ const logEntryCategoriesCountJobId = getJobId(
+ context.infra.spaceId,
+ sourceId,
+ logEntryCategoriesJobTypes[0]
+ );
+
+ const { logEntryCategoriesById } = await fetchLogEntryCategories(
+ context,
+ logEntryCategoriesCountJobId,
+ [parsedCategoryId]
+ );
+
+ const category = logEntryCategoriesById[parsedCategoryId];
+
+ if (category == null) {
+ throw new UnknownCategoryError(parsedCategoryId);
+ }
+
+ categoryQuery = category._source.terms;
+ }
+
+ const {
+ hits: { hits },
+ } = decodeOrThrow(logEntryExamplesResponseRT)(
+ await callWithRequest(
+ context,
+ 'search',
+ createLogEntryExamplesQuery(
+ indices,
+ timestampField,
+ tiebreakerField,
+ startTime,
+ endTime,
+ dataset,
+ exampleCount,
+ categoryQuery
+ )
+ )
+ );
+
+ const esSearchSpan = finalizeEsSearchSpan();
+
+ return {
+ examples: hits.map((hit) => ({
+ id: hit._id,
+ dataset: hit._source.event?.dataset ?? '',
+ message: hit._source.message ?? '',
+ timestamp: hit.sort[0],
+ tiebreaker: hit.sort[1],
+ })),
+ timing: {
+ spans: [esSearchSpan],
+ },
+ };
+}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts
index 4f244d724405e..6d00ba56e0e66 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts
@@ -17,7 +17,6 @@ import { decodeOrThrow } from '../../../common/runtime_types';
import type { MlAnomalyDetectors, MlSystem } from '../../types';
import {
InsufficientLogAnalysisMlJobConfigurationError,
- NoLogAnalysisMlJobError,
NoLogAnalysisResultsIndexError,
UnknownCategoryError,
} from './errors';
@@ -45,6 +44,7 @@ import {
topLogEntryCategoriesResponseRT,
} from './queries/top_log_entry_categories';
import { InfraSource } from '../sources';
+import { fetchMlJob } from './common';
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
@@ -213,7 +213,7 @@ export async function getLogEntryCategoryExamples(
const {
mlJob,
timing: { spans: fetchMlJobSpans },
- } = await fetchMlJob(context, logEntryCategoriesCountJobId);
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, logEntryCategoriesCountJobId);
const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
const indices = customSettings?.logs_source_config?.indexPattern;
@@ -330,7 +330,7 @@ async function fetchTopLogEntryCategories(
};
}
-async function fetchLogEntryCategories(
+export async function fetchLogEntryCategories(
context: { infra: { mlSystem: MlSystem } },
logEntryCategoriesCountJobId: string,
categoryIds: number[]
@@ -452,30 +452,6 @@ async function fetchTopLogEntryCategoryHistograms(
};
}
-async function fetchMlJob(
- context: { infra: { mlAnomalyDetectors: MlAnomalyDetectors } },
- logEntryCategoriesCountJobId: string
-) {
- const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
-
- const {
- jobs: [mlJob],
- } = await context.infra.mlAnomalyDetectors.jobs(logEntryCategoriesCountJobId);
-
- const mlGetJobSpan = finalizeMlGetJobSpan();
-
- if (mlJob == null) {
- throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryCategoriesCountJobId}.`);
- }
-
- return {
- mlJob,
- timing: {
- spans: [mlGetJobSpan],
- },
- };
-}
-
async function fetchLogEntryCategoryExamples(
requestContext: { core: { elasticsearch: { legacy: { client: ILegacyScopedClusterClient } } } },
indices: string,
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts
index 290cf03b67365..0323980dcd013 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts
@@ -7,7 +7,6 @@
import { pipe } from 'fp-ts/lib/pipeable';
import { map, fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
-import { RequestHandlerContext } from 'src/core/server';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
import {
logRateModelPlotResponseRT,
@@ -15,22 +14,9 @@ import {
LogRateModelPlotBucket,
CompositeTimestampPartitionKey,
} from './queries';
-import { startTracingSpan } from '../../../common/performance_tracing';
-import { decodeOrThrow } from '../../../common/runtime_types';
-import { getJobId, jobCustomSettingsRT } from '../../../common/log_analysis';
-import {
- createLogEntryRateExamplesQuery,
- logEntryRateExamplesResponseRT,
-} from './queries/log_entry_rate_examples';
-import {
- InsufficientLogAnalysisMlJobConfigurationError,
- NoLogAnalysisMlJobError,
- NoLogAnalysisResultsIndexError,
-} from './errors';
-import { InfraSource } from '../sources';
+import { getJobId } from '../../../common/log_analysis';
+import { NoLogAnalysisResultsIndexError } from './errors';
import type { MlSystem } from '../../types';
-import { InfraRequestHandlerContext } from '../../types';
-import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
@@ -143,130 +129,3 @@ export async function getLogEntryRateBuckets(
}
}, []);
}
-
-export async function getLogEntryRateExamples(
- context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
- sourceId: string,
- startTime: number,
- endTime: number,
- dataset: string,
- exampleCount: number,
- sourceConfiguration: InfraSource,
- callWithRequest: KibanaFramework['callWithRequest']
-) {
- const finalizeLogEntryRateExamplesSpan = startTracingSpan(
- 'get log entry rate example log entries'
- );
-
- const jobId = getJobId(context.infra.spaceId, sourceId, 'log-entry-rate');
-
- const {
- mlJob,
- timing: { spans: fetchMlJobSpans },
- } = await fetchMlJob(context, jobId);
-
- const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
- const indices = customSettings?.logs_source_config?.indexPattern;
- const timestampField = customSettings?.logs_source_config?.timestampField;
- const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
-
- if (indices == null || timestampField == null) {
- throw new InsufficientLogAnalysisMlJobConfigurationError(
- `Failed to find index configuration for ml job ${jobId}`
- );
- }
-
- const {
- examples,
- timing: { spans: fetchLogEntryRateExamplesSpans },
- } = await fetchLogEntryRateExamples(
- context,
- indices,
- timestampField,
- tiebreakerField,
- startTime,
- endTime,
- dataset,
- exampleCount,
- callWithRequest
- );
-
- const logEntryRateExamplesSpan = finalizeLogEntryRateExamplesSpan();
-
- return {
- data: examples,
- timing: {
- spans: [logEntryRateExamplesSpan, ...fetchMlJobSpans, ...fetchLogEntryRateExamplesSpans],
- },
- };
-}
-
-export async function fetchLogEntryRateExamples(
- context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
- indices: string,
- timestampField: string,
- tiebreakerField: string,
- startTime: number,
- endTime: number,
- dataset: string,
- exampleCount: number,
- callWithRequest: KibanaFramework['callWithRequest']
-) {
- const finalizeEsSearchSpan = startTracingSpan('Fetch log rate examples from ES');
-
- const {
- hits: { hits },
- } = decodeOrThrow(logEntryRateExamplesResponseRT)(
- await callWithRequest(
- context,
- 'search',
- createLogEntryRateExamplesQuery(
- indices,
- timestampField,
- tiebreakerField,
- startTime,
- endTime,
- dataset,
- exampleCount
- )
- )
- );
-
- const esSearchSpan = finalizeEsSearchSpan();
-
- return {
- examples: hits.map((hit) => ({
- id: hit._id,
- dataset,
- message: hit._source.message ?? '',
- timestamp: hit.sort[0],
- tiebreaker: hit.sort[1],
- })),
- timing: {
- spans: [esSearchSpan],
- },
- };
-}
-
-async function fetchMlJob(
- context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
- logEntryRateJobId: string
-) {
- const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
- const {
- jobs: [mlJob],
- } = await context.infra.mlAnomalyDetectors.jobs(logEntryRateJobId);
-
- const mlGetJobSpan = finalizeMlGetJobSpan();
-
- if (mlJob == null) {
- throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryRateJobId}.`);
- }
-
- return {
- mlJob,
- timing: {
- spans: [mlGetJobSpan],
- },
- };
-}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts
index eacf29b303db0..87394028095de 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts
@@ -21,6 +21,14 @@ export const createJobIdFilters = (jobId: string) => [
},
];
+export const createJobIdsFilters = (jobIds: string[]) => [
+ {
+ terms: {
+ job_id: jobIds,
+ },
+ },
+];
+
export const createTimeRangeFilters = (startTime: number, endTime: number) => [
{
range: {
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts
index 8c470acbf02fb..792c5bf98b538 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts
@@ -6,3 +6,4 @@
export * from './log_entry_rate';
export * from './top_log_entry_categories';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..fc72776ea5cac
--- /dev/null
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts
@@ -0,0 +1,128 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
+import {
+ createJobIdsFilters,
+ createTimeRangeFilters,
+ createResultTypeFilters,
+ defaultRequestParameters,
+} from './common';
+import { Sort, Pagination } from '../../../../common/http_api/log_analysis';
+
+// TODO: Reassess validity of this against ML docs
+const TIEBREAKER_FIELD = '_doc';
+
+const sortToMlFieldMap = {
+ dataset: 'partition_field_value',
+ anomalyScore: 'record_score',
+ startTime: 'timestamp',
+};
+
+export const createLogEntryAnomaliesQuery = (
+ jobIds: string[],
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) => {
+ const { field } = sort;
+ const { pageSize } = pagination;
+
+ const filters = [
+ ...createJobIdsFilters(jobIds),
+ ...createTimeRangeFilters(startTime, endTime),
+ ...createResultTypeFilters(['record']),
+ ];
+
+ const sourceFields = [
+ 'job_id',
+ 'record_score',
+ 'typical',
+ 'actual',
+ 'partition_field_value',
+ 'timestamp',
+ 'bucket_span',
+ 'by_field_value',
+ ];
+
+ const { querySortDirection, queryCursor } = parsePaginationCursor(sort, pagination);
+
+ const sortOptions = [
+ { [sortToMlFieldMap[field]]: querySortDirection },
+ { [TIEBREAKER_FIELD]: querySortDirection }, // Tiebreaker
+ ];
+
+ const resultsQuery = {
+ ...defaultRequestParameters,
+ body: {
+ query: {
+ bool: {
+ filter: filters,
+ },
+ },
+ search_after: queryCursor,
+ sort: sortOptions,
+ size: pageSize,
+ _source: sourceFields,
+ },
+ };
+
+ return resultsQuery;
+};
+
+export const logEntryAnomalyHitRT = rt.type({
+ _id: rt.string,
+ _source: rt.intersection([
+ rt.type({
+ job_id: rt.string,
+ record_score: rt.number,
+ typical: rt.array(rt.number),
+ actual: rt.array(rt.number),
+ partition_field_value: rt.string,
+ bucket_span: rt.number,
+ timestamp: rt.number,
+ }),
+ rt.partial({
+ by_field_value: rt.string,
+ }),
+ ]),
+ sort: rt.tuple([rt.union([rt.string, rt.number]), rt.union([rt.string, rt.number])]),
+});
+
+export type LogEntryAnomalyHit = rt.TypeOf<typeof logEntryAnomalyHitRT>;
+
+export const logEntryAnomaliesResponseRT = rt.intersection([
+ commonSearchSuccessResponseFieldsRT,
+ rt.type({
+ hits: rt.type({
+ hits: rt.array(logEntryAnomalyHitRT),
+ }),
+ }),
+]);
+
+export type LogEntryAnomaliesResponseRT = rt.TypeOf<typeof logEntryAnomaliesResponseRT>;
+
+const parsePaginationCursor = (sort: Sort, pagination: Pagination) => {
+ const { cursor } = pagination;
+ const { direction } = sort;
+
+ if (!cursor) {
+ return { querySortDirection: direction, queryCursor: undefined };
+ }
+
+ // We will always use ES's search_after to paginate, to mimic "search_before" behaviour we
+ // need to reverse the user's chosen search direction for the ES query.
+ if ('searchBefore' in cursor) {
+ return {
+ querySortDirection: direction === 'desc' ? 'asc' : 'desc',
+ queryCursor: cursor.searchBefore,
+ };
+ } else {
+ return { querySortDirection: direction, queryCursor: cursor.searchAfter };
+ }
+};
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate_examples.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_examples.ts
similarity index 59%
rename from x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate_examples.ts
rename to x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_examples.ts
index ef06641caf797..74a664e78dcd6 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate_examples.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_examples.ts
@@ -10,14 +10,15 @@ import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearc
import { defaultRequestParameters } from './common';
import { partitionField } from '../../../../common/log_analysis';
-export const createLogEntryRateExamplesQuery = (
+export const createLogEntryExamplesQuery = (
indices: string,
timestampField: string,
tiebreakerField: string,
startTime: number,
endTime: number,
dataset: string,
- exampleCount: number
+ exampleCount: number,
+ categoryQuery?: string
) => ({
...defaultRequestParameters,
body: {
@@ -32,11 +33,27 @@ export const createLogEntryRateExamplesQuery = (
},
},
},
- {
- term: {
- [partitionField]: dataset,
- },
- },
+ ...(!!dataset
+ ? [
+ {
+ term: {
+ [partitionField]: dataset,
+ },
+ },
+ ]
+ : []),
+ ...(categoryQuery
+ ? [
+ {
+ match: {
+ message: {
+ query: categoryQuery,
+ operator: 'AND',
+ },
+ },
+ },
+ ]
+ : []),
],
},
},
@@ -47,7 +64,7 @@ export const createLogEntryRateExamplesQuery = (
size: exampleCount,
});
-export const logEntryRateExampleHitRT = rt.type({
+export const logEntryExampleHitRT = rt.type({
_id: rt.string,
_source: rt.partial({
event: rt.partial({
@@ -58,15 +75,15 @@ export const logEntryRateExampleHitRT = rt.type({
sort: rt.tuple([rt.number, rt.number]),
});
-export type LogEntryRateExampleHit = rt.TypeOf<typeof logEntryRateExampleHitRT>;
+export type LogEntryExampleHit = rt.TypeOf<typeof logEntryExampleHitRT>;
-export const logEntryRateExamplesResponseRT = rt.intersection([
+export const logEntryExamplesResponseRT = rt.intersection([
commonSearchSuccessResponseFieldsRT,
rt.type({
hits: rt.type({
- hits: rt.array(logEntryRateExampleHitRT),
+ hits: rt.array(logEntryExampleHitRT),
}),
}),
]);
-export type LogEntryRateExamplesResponse = rt.TypeOf<typeof logEntryRateExamplesResponseRT>;
+export type LogEntryExamplesResponse = rt.TypeOf<typeof logEntryExamplesResponseRT>;
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts
index 30b6be435837b..cbd89db97236f 100644
--- a/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts
+++ b/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts
@@ -8,4 +8,5 @@ export * from './log_entry_categories';
export * from './log_entry_category_datasets';
export * from './log_entry_category_examples';
export * from './log_entry_rate';
-export * from './log_entry_rate_examples';
+export * from './log_entry_examples';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..f4911658ea496
--- /dev/null
+++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts
@@ -0,0 +1,112 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import Boom from 'boom';
+import { InfraBackendLibs } from '../../../lib/infra_types';
+import {
+ LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH,
+ getLogEntryAnomaliesSuccessReponsePayloadRT,
+ getLogEntryAnomaliesRequestPayloadRT,
+ GetLogEntryAnomaliesRequestPayload,
+ Sort,
+ Pagination,
+} from '../../../../common/http_api/log_analysis';
+import { createValidationFunction } from '../../../../common/runtime_types';
+import { assertHasInfraMlPlugins } from '../../../utils/request_context';
+import { getLogEntryAnomalies } from '../../../lib/log_analysis';
+
+export const initGetLogEntryAnomaliesRoute = ({ framework }: InfraBackendLibs) => {
+ framework.registerRoute(
+ {
+ method: 'post',
+ path: LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH,
+ validate: {
+ body: createValidationFunction(getLogEntryAnomaliesRequestPayloadRT),
+ },
+ },
+ framework.router.handleLegacyErrors(async (requestContext, request, response) => {
+ const {
+ data: {
+ sourceId,
+ timeRange: { startTime, endTime },
+ sort: sortParam,
+ pagination: paginationParam,
+ },
+ } = request.body;
+
+ const { sort, pagination } = getSortAndPagination(sortParam, paginationParam);
+
+ try {
+ assertHasInfraMlPlugins(requestContext);
+
+ const {
+ data: logEntryAnomalies,
+ paginationCursors,
+ hasMoreEntries,
+ timing,
+ } = await getLogEntryAnomalies(
+ requestContext,
+ sourceId,
+ startTime,
+ endTime,
+ sort,
+ pagination
+ );
+
+ return response.ok({
+ body: getLogEntryAnomaliesSuccessReponsePayloadRT.encode({
+ data: {
+ anomalies: logEntryAnomalies,
+ hasMoreEntries,
+ paginationCursors,
+ },
+ timing,
+ }),
+ });
+ } catch (error) {
+ if (Boom.isBoom(error)) {
+ throw error;
+ }
+
+ return response.customError({
+ statusCode: error.statusCode ?? 500,
+ body: {
+ message: error.message ?? 'An unexpected error occurred',
+ },
+ });
+ }
+ })
+ );
+};
+
+const getSortAndPagination = (
+ sort: Partial<Sort> = {},
+ pagination: Partial<Pagination> = {}
+): {
+ sort: Sort;
+ pagination: Pagination;
+} => {
+ const sortDefaults = {
+ field: 'anomalyScore' as const,
+ direction: 'desc' as const,
+ };
+
+ const sortWithDefaults = {
+ ...sortDefaults,
+ ...sort,
+ };
+
+ const paginationDefaults = {
+ pageSize: 50,
+ };
+
+ const paginationWithDefaults = {
+ ...paginationDefaults,
+ ...pagination,
+ };
+
+ return { sort: sortWithDefaults, pagination: paginationWithDefaults };
+};
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_examples.ts
similarity index 75%
rename from x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts
rename to x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_examples.ts
index b8ebcc66911dc..be4caee769506 100644
--- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts
+++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_examples.ts
@@ -7,21 +7,21 @@
import Boom from 'boom';
import { createValidationFunction } from '../../../../common/runtime_types';
import { InfraBackendLibs } from '../../../lib/infra_types';
-import { NoLogAnalysisResultsIndexError, getLogEntryRateExamples } from '../../../lib/log_analysis';
+import { NoLogAnalysisResultsIndexError, getLogEntryExamples } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
import {
- getLogEntryRateExamplesRequestPayloadRT,
- getLogEntryRateExamplesSuccessReponsePayloadRT,
+ getLogEntryExamplesRequestPayloadRT,
+ getLogEntryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../common/http_api/log_analysis';
-export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
+export const initGetLogEntryExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
framework.registerRoute(
{
method: 'post',
path: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
validate: {
- body: createValidationFunction(getLogEntryRateExamplesRequestPayloadRT),
+ body: createValidationFunction(getLogEntryExamplesRequestPayloadRT),
},
},
framework.router.handleLegacyErrors(async (requestContext, request, response) => {
@@ -31,6 +31,7 @@ export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBa
exampleCount,
sourceId,
timeRange: { startTime, endTime },
+ categoryId,
},
} = request.body;
@@ -42,7 +43,7 @@ export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBa
try {
assertHasInfraMlPlugins(requestContext);
- const { data: logEntryRateExamples, timing } = await getLogEntryRateExamples(
+ const { data: logEntryExamples, timing } = await getLogEntryExamples(
requestContext,
sourceId,
startTime,
@@ -50,13 +51,14 @@ export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBa
dataset,
exampleCount,
sourceConfiguration,
- framework.callWithRequest
+ framework.callWithRequest,
+ categoryId
);
return response.ok({
- body: getLogEntryRateExamplesSuccessReponsePayloadRT.encode({
+ body: getLogEntryExamplesSuccessReponsePayloadRT.encode({
data: {
- examples: logEntryRateExamples,
+ examples: logEntryExamples,
},
timing,
}),
diff --git a/x-pack/plugins/ingest_manager/common/services/config_to_yaml.ts b/x-pack/plugins/ingest_manager/common/services/config_to_yaml.ts
index c2043a40369e2..7e03e4572f9ee 100644
--- a/x-pack/plugins/ingest_manager/common/services/config_to_yaml.ts
+++ b/x-pack/plugins/ingest_manager/common/services/config_to_yaml.ts
@@ -11,8 +11,8 @@ const CONFIG_KEYS_ORDER = [
'name',
'revision',
'type',
- 'settings',
'outputs',
+ 'settings',
'inputs',
'enabled',
'use_output',
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/agent_config.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/agent_config.ts
index 56b78c6faa93a..0bb09c2731032 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/agent_config.ts
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/agent_config.ts
@@ -48,6 +48,17 @@ export const useGetOneAgentConfigFull = (agentConfigId: string) => {
});
};
+export const sendGetOneAgentConfigFull = (
+ agentConfigId: string,
+ query: { standalone?: boolean } = {}
+) => {
+ return sendRequest<GetFullAgentConfigResponse>({
+ path: agentConfigRouteService.getInfoFullPath(agentConfigId),
+ method: 'get',
+ query,
+ });
+};
+
export const sendGetOneAgentConfig = (agentConfigId: string) => {
return sendRequest({
path: agentConfigRouteService.getInfoPath(agentConfigId),
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/enrollment_api_keys.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/enrollment_api_keys.ts
index 10d9e03e986e1..5a334e2739027 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/enrollment_api_keys.ts
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/hooks/use_request/enrollment_api_keys.ts
@@ -44,6 +44,18 @@ export function sendDeleteOneEnrollmentAPIKey(keyId: string, options?: RequestOp
});
}
+export function sendGetEnrollmentAPIKeys(
+ query: GetEnrollmentAPIKeysRequest['query'],
+ options?: RequestOptions
+) {
+ return sendRequest({
+ method: 'get',
+ path: enrollmentAPIKeyRouteService.getListPath(),
+ query,
+ ...options,
+ });
+}
+
export function useGetEnrollmentAPIKeys(
query: GetEnrollmentAPIKeysRequest['query'],
options?: RequestOptions
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/layout.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/layout.tsx
index e0f40f1b15375..7ccb59f0e741e 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/layout.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/layout.tsx
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React from 'react';
+import React, { memo, useMemo } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import {
EuiFlexGroup,
@@ -27,130 +27,148 @@ export const CreatePackageConfigPageLayout: React.FunctionComponent<{
agentConfig?: AgentConfig;
packageInfo?: PackageInfo;
'data-test-subj'?: string;
-}> = ({
- from,
- cancelUrl,
- onCancel,
- agentConfig,
- packageInfo,
- children,
- 'data-test-subj': dataTestSubj,
-}) => {
- const leftColumn = (
-
-
- {/* eslint-disable-next-line @elastic/eui/href-or-on-click */}
-
-
-
-
-
+}> = memo(
+ ({
+ from,
+ cancelUrl,
+ onCancel,
+ agentConfig,
+ packageInfo,
+ children,
+ 'data-test-subj': dataTestSubj,
+ }) => {
+ const pageTitle = useMemo(() => {
+ if ((from === 'package' || from === 'edit') && packageInfo) {
+ return (
+
+
+
+
+
+
+
+ {from === 'edit' ? (
+
+ ) : (
+
+ )}
+
+
+
+
+ );
+ }
+
+ return from === 'edit' ? (
- {from === 'edit' ? (
-
- ) : (
-
- )}
+
-
-
-
-
- {from === 'edit' ? (
+ ) : (
+
+
- ) : from === 'config' ? (
+
+
+ );
+ }, [from, packageInfo]);
+
+ const pageDescription = useMemo(() => {
+ return from === 'edit' ? (
+
+ ) : from === 'config' ? (
+
+ ) : (
+
+ );
+ }, [from]);
+
+ const leftColumn = (
+
+
+ {/* eslint-disable-next-line @elastic/eui/href-or-on-click */}
+
- ) : (
+
+
+ {pageTitle}
+
+
+
+ {pageDescription}
+
+
+
+ );
+
+ const rightColumn =
+ agentConfig && (from === 'config' || from === 'edit') ? (
+
+
- )}
-
-
-
- );
- const rightColumn = (
-
-
-
- {agentConfig && (from === 'config' || from === 'edit') ? (
-
-
-
-
-
- {agentConfig?.name || '-'}
-
-
- ) : null}
- {packageInfo && from === 'package' ? (
-
-
-
-
-
-
-
-
-
-
- {packageInfo?.title || packageInfo?.name || '-'}
-
-
-
-
- ) : null}
-
-
- );
+
+ {agentConfig?.name || '-'}
+
+ ) : undefined;
- const maxWidth = 770;
- return (
-
- {children}
-
- );
-};
+ const maxWidth = 770;
+ return (
+
+ {children}
+
+ );
+ }
+);
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_config.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_config.tsx
index 85c0f2134d8dc..98f04dbd92659 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_config.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_config.tsx
@@ -3,17 +3,15 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState, Fragment } from 'react';
+import React, { useState, Fragment, memo, useMemo } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import {
+ EuiFlexGrid,
EuiFlexGroup,
EuiFlexItem,
EuiText,
- EuiTextColor,
EuiSpacer,
EuiButtonEmpty,
- EuiTitle,
- EuiIconTip,
} from '@elastic/eui';
import { PackageConfigInput, RegistryVarsEntry } from '../../../../types';
import {
@@ -29,150 +27,157 @@ export const PackageConfigInputConfig: React.FunctionComponent<{
updatePackageConfigInput: (updatedInput: Partial) => void;
inputVarsValidationResults: PackageConfigConfigValidationResults;
forceShowErrors?: boolean;
-}> = ({
- packageInputVars,
- packageConfigInput,
- updatePackageConfigInput,
- inputVarsValidationResults,
- forceShowErrors,
-}) => {
- // Showing advanced options toggle state
- const [isShowingAdvanced, setIsShowingAdvanced] = useState(false);
+}> = memo(
+ ({
+ packageInputVars,
+ packageConfigInput,
+ updatePackageConfigInput,
+ inputVarsValidationResults,
+ forceShowErrors,
+ }) => {
+ // Showing advanced options toggle state
+ const [isShowingAdvanced, setIsShowingAdvanced] = useState(false);
- // Errors state
- const hasErrors = forceShowErrors && validationHasErrors(inputVarsValidationResults);
+ // Errors state
+ const hasErrors = forceShowErrors && validationHasErrors(inputVarsValidationResults);
- const requiredVars: RegistryVarsEntry[] = [];
- const advancedVars: RegistryVarsEntry[] = [];
+ const requiredVars: RegistryVarsEntry[] = [];
+ const advancedVars: RegistryVarsEntry[] = [];
- if (packageInputVars) {
- packageInputVars.forEach((varDef) => {
- if (isAdvancedVar(varDef)) {
- advancedVars.push(varDef);
- } else {
- requiredVars.push(varDef);
- }
- });
- }
+ if (packageInputVars) {
+ packageInputVars.forEach((varDef) => {
+ if (isAdvancedVar(varDef)) {
+ advancedVars.push(varDef);
+ } else {
+ requiredVars.push(varDef);
+ }
+ });
+ }
+
+ const advancedVarsWithErrorsCount: number = useMemo(
+ () =>
+ advancedVars.filter(
+ ({ name: varName }) => inputVarsValidationResults.vars?.[varName]?.length
+ ).length,
+ [advancedVars, inputVarsValidationResults.vars]
+ );
- return (
-
-
-
-
-
-
-
+ return (
+
+
+
+
+
+
+
-
-
+
+
+
+
+
+
+
+
- {hasErrors ? (
-
-
- }
- position="right"
- type="alert"
- iconProps={{ color: 'danger' }}
- />
-
- ) : null}
-
-
-
-
-
-
-
-
-
-
- {requiredVars.map((varDef) => {
- const { name: varName, type: varType } = varDef;
- const value = packageConfigInput.vars![varName].value;
- return (
-
- {
- updatePackageConfigInput({
- vars: {
- ...packageConfigInput.vars,
- [varName]: {
- type: varType,
- value: newValue,
+
+
+
+ {requiredVars.map((varDef) => {
+ const { name: varName, type: varType } = varDef;
+ const value = packageConfigInput.vars![varName].value;
+ return (
+
+ {
+ updatePackageConfigInput({
+ vars: {
+ ...packageConfigInput.vars,
+ [varName]: {
+ type: varType,
+ value: newValue,
+ },
},
- },
- });
- }}
- errors={inputVarsValidationResults.vars![varName]}
- forceShowErrors={forceShowErrors}
- />
-
- );
- })}
- {advancedVars.length ? (
-
-
- {/* Wrapper div to prevent button from going full width */}
-
- setIsShowingAdvanced(!isShowingAdvanced)}
- flush="left"
- >
-
-
-
-
- {isShowingAdvanced
- ? advancedVars.map((varDef) => {
- const { name: varName, type: varType } = varDef;
- const value = packageConfigInput.vars![varName].value;
- return (
-
- {
- updatePackageConfigInput({
- vars: {
- ...packageConfigInput.vars,
- [varName]: {
- type: varType,
- value: newValue,
- },
- },
- });
- }}
- errors={inputVarsValidationResults.vars![varName]}
- forceShowErrors={forceShowErrors}
+ });
+ }}
+ errors={inputVarsValidationResults.vars![varName]}
+ forceShowErrors={forceShowErrors}
+ />
+
+ );
+ })}
+ {advancedVars.length ? (
+
+
+ {/* Wrapper div to prevent button from going full width */}
+
+
+ setIsShowingAdvanced(!isShowingAdvanced)}
+ flush="left"
+ >
+
+
+
+ {!isShowingAdvanced && hasErrors && advancedVarsWithErrorsCount ? (
+
+
+
+
- );
- })
- : null}
-
- ) : null}
-
-
-
- );
-};
+ ) : null}
+
+
+ {isShowingAdvanced
+ ? advancedVars.map((varDef) => {
+ const { name: varName, type: varType } = varDef;
+ const value = packageConfigInput.vars![varName].value;
+ return (
+
+ {
+ updatePackageConfigInput({
+ vars: {
+ ...packageConfigInput.vars,
+ [varName]: {
+ type: varType,
+ value: newValue,
+ },
+ },
+ });
+ }}
+ errors={inputVarsValidationResults.vars![varName]}
+ forceShowErrors={forceShowErrors}
+ />
+
+ );
+ })
+ : null}
+
+ ) : null}
+
+
+
+ );
+ }
+);
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_panel.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_panel.tsx
index f9c9dcd469b25..af26afdbf74d7 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_panel.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_panel.tsx
@@ -3,21 +3,18 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState, Fragment } from 'react';
+import React, { useState, Fragment, memo } from 'react';
import styled from 'styled-components';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import {
- EuiPanel,
EuiFlexGroup,
EuiFlexItem,
EuiSwitch,
EuiText,
- EuiTextColor,
EuiButtonIcon,
EuiHorizontalRule,
EuiSpacer,
- EuiIconTip,
} from '@elastic/eui';
import {
PackageConfigInput,
@@ -25,16 +22,44 @@ import {
RegistryInput,
RegistryStream,
} from '../../../../types';
-import { PackageConfigInputValidationResults, validationHasErrors } from '../services';
+import {
+ PackageConfigInputValidationResults,
+ hasInvalidButRequiredVar,
+ countValidationErrors,
+} from '../services';
import { PackageConfigInputConfig } from './package_config_input_config';
import { PackageConfigInputStreamConfig } from './package_config_input_stream';
-const FlushHorizontalRule = styled(EuiHorizontalRule)`
- margin-left: -${(props) => props.theme.eui.paddingSizes.m};
- margin-right: -${(props) => props.theme.eui.paddingSizes.m};
- width: auto;
+const ShortenedHorizontalRule = styled(EuiHorizontalRule)`
+ &&& {
+ width: ${(11 / 12) * 100}%;
+ margin-left: auto;
+ }
`;
+const shouldShowStreamsByDefault = (
+ packageInput: RegistryInput,
+ packageInputStreams: Array,
+ packageConfigInput: PackageConfigInput
+): boolean => {
+ return (
+ packageConfigInput.enabled &&
+ (hasInvalidButRequiredVar(packageInput.vars, packageConfigInput.vars) ||
+ Boolean(
+ packageInputStreams.find(
+ (stream) =>
+ stream.enabled &&
+ hasInvalidButRequiredVar(
+ stream.vars,
+ packageConfigInput.streams.find(
+ (pkgStream) => stream.dataset.name === pkgStream.dataset.name
+ )?.vars
+ )
+ )
+ ))
+ );
+};
+
export const PackageConfigInputPanel: React.FunctionComponent<{
packageInput: RegistryInput;
packageInputStreams: Array;
@@ -42,148 +67,136 @@ export const PackageConfigInputPanel: React.FunctionComponent<{
updatePackageConfigInput: (updatedInput: Partial) => void;
inputValidationResults: PackageConfigInputValidationResults;
forceShowErrors?: boolean;
-}> = ({
- packageInput,
- packageInputStreams,
- packageConfigInput,
- updatePackageConfigInput,
- inputValidationResults,
- forceShowErrors,
-}) => {
- // Showing streams toggle state
- const [isShowingStreams, setIsShowingStreams] = useState(false);
+}> = memo(
+ ({
+ packageInput,
+ packageInputStreams,
+ packageConfigInput,
+ updatePackageConfigInput,
+ inputValidationResults,
+ forceShowErrors,
+ }) => {
+ // Showing streams toggle state
+ const [isShowingStreams, setIsShowingStreams] = useState(
+ shouldShowStreamsByDefault(packageInput, packageInputStreams, packageConfigInput)
+ );
- // Errors state
- const hasErrors = forceShowErrors && validationHasErrors(inputValidationResults);
+ // Errors state
+ const errorCount = countValidationErrors(inputValidationResults);
+ const hasErrors = forceShowErrors && errorCount;
- return (
-
- {/* Header / input-level toggle */}
-
-
-
-
-
-
-
- {packageInput.title || packageInput.type}
-
-
-
-
- {hasErrors ? (
+ const inputStreams = packageInputStreams
+ .map((packageInputStream) => {
+ return {
+ packageInputStream,
+ packageConfigInputStream: packageConfigInput.streams.find(
+ (stream) => stream.dataset.name === packageInputStream.dataset.name
+ ),
+ };
+ })
+ .filter((stream) => Boolean(stream.packageConfigInputStream));
+
+ return (
+ <>
+ {/* Header / input-level toggle */}
+
+
+
-
- }
- position="right"
- type="alert"
- iconProps={{ color: 'danger' }}
- />
+
+ {packageInput.title || packageInput.type}
+
- ) : null}
-
- }
- checked={packageConfigInput.enabled}
- onChange={(e) => {
- const enabled = e.target.checked;
- updatePackageConfigInput({
- enabled,
- streams: packageConfigInput.streams.map((stream) => ({
- ...stream,
+
+ }
+ checked={packageConfigInput.enabled}
+ onChange={(e) => {
+ const enabled = e.target.checked;
+ updatePackageConfigInput({
enabled,
- })),
- });
- }}
- />
-
-
-
-
-
-
-
- {packageConfigInput.streams.filter((stream) => stream.enabled).length}
-
-
- ),
- total: packageInputStreams.length,
- }}
- />
-
-
-
- setIsShowingStreams(!isShowingStreams)}
- color="text"
- aria-label={
- isShowingStreams
- ? i18n.translate(
- 'xpack.ingestManager.createPackageConfig.stepConfigure.hideStreamsAriaLabel',
- {
- defaultMessage: 'Hide {type} streams',
- values: {
- type: packageInput.type,
- },
- }
- )
- : i18n.translate(
- 'xpack.ingestManager.createPackageConfig.stepConfigure.showStreamsAriaLabel',
- {
- defaultMessage: 'Show {type} streams',
- values: {
- type: packageInput.type,
- },
- }
- )
+ streams: packageConfigInput.streams.map((stream) => ({
+ ...stream,
+ enabled,
+ })),
+ });
+ if (!enabled && isShowingStreams) {
+ setIsShowingStreams(false);
}
- />
-
-
-
-
+ }}
+ />
+
+
+
+ {hasErrors ? (
+
+
+
+
+
+ ) : null}
+
+ setIsShowingStreams(!isShowingStreams)}
+ color={hasErrors ? 'danger' : 'text'}
+ aria-label={
+ isShowingStreams
+ ? i18n.translate(
+ 'xpack.ingestManager.createPackageConfig.stepConfigure.hideStreamsAriaLabel',
+ {
+ defaultMessage: 'Hide {type} inputs',
+ values: {
+ type: packageInput.type,
+ },
+ }
+ )
+ : i18n.translate(
+ 'xpack.ingestManager.createPackageConfig.stepConfigure.showStreamsAriaLabel',
+ {
+ defaultMessage: 'Show {type} inputs',
+ values: {
+ type: packageInput.type,
+ },
+ }
+ )
+ }
+ />
+
+
+
+
- {/* Header rule break */}
- {isShowingStreams ? : null}
+ {/* Header rule break */}
+ {isShowingStreams ? : null}
- {/* Input level configuration */}
- {isShowingStreams && packageInput.vars && packageInput.vars.length ? (
-
-
-
-
- ) : null}
+ {/* Input level configuration */}
+ {isShowingStreams && packageInput.vars && packageInput.vars.length ? (
+
+
+
+
+ ) : null}
- {/* Per-stream configuration */}
- {isShowingStreams ? (
-
- {packageInputStreams.map((packageInputStream) => {
- const packageConfigInputStream = packageConfigInput.streams.find(
- (stream) => stream.dataset.name === packageInputStream.dataset.name
- );
- return packageConfigInputStream ? (
-
+ {/* Per-stream configuration */}
+ {isShowingStreams ? (
+
+ {inputStreams.map(({ packageInputStream, packageConfigInputStream }, index) => (
+
) => {
@@ -213,17 +226,21 @@ export const PackageConfigInputPanel: React.FunctionComponent<{
updatePackageConfigInput(updatedInput);
}}
inputStreamValidationResults={
- inputValidationResults.streams![packageConfigInputStream.id]
+ inputValidationResults.streams![packageConfigInputStream!.id]
}
forceShowErrors={forceShowErrors}
/>
-
-
+ {index !== inputStreams.length - 1 ? (
+ <>
+
+
+ >
+ ) : null}
- ) : null;
- })}
-
- ) : null}
-
- );
-};
+ ))}
+
+ ) : null}
+ >
+ );
+ }
+);
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_stream.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_stream.tsx
index 52a4748fe14c7..11a9df276485b 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_stream.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_stream.tsx
@@ -3,18 +3,17 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState, Fragment } from 'react';
+import React, { useState, Fragment, memo, useMemo } from 'react';
import ReactMarkdown from 'react-markdown';
import { FormattedMessage } from '@kbn/i18n/react';
import {
+ EuiFlexGrid,
EuiFlexGroup,
EuiFlexItem,
EuiSwitch,
EuiText,
EuiSpacer,
EuiButtonEmpty,
- EuiTextColor,
- EuiIconTip,
} from '@elastic/eui';
import { PackageConfigInputStream, RegistryStream, RegistryVarsEntry } from '../../../../types';
import {
@@ -30,153 +29,157 @@ export const PackageConfigInputStreamConfig: React.FunctionComponent<{
updatePackageConfigInputStream: (updatedStream: Partial) => void;
inputStreamValidationResults: PackageConfigConfigValidationResults;
forceShowErrors?: boolean;
-}> = ({
- packageInputStream,
- packageConfigInputStream,
- updatePackageConfigInputStream,
- inputStreamValidationResults,
- forceShowErrors,
-}) => {
- // Showing advanced options toggle state
- const [isShowingAdvanced, setIsShowingAdvanced] = useState(false);
+}> = memo(
+ ({
+ packageInputStream,
+ packageConfigInputStream,
+ updatePackageConfigInputStream,
+ inputStreamValidationResults,
+ forceShowErrors,
+ }) => {
+ // Showing advanced options toggle state
+ const [isShowingAdvanced, setIsShowingAdvanced] = useState();
- // Errors state
- const hasErrors = forceShowErrors && validationHasErrors(inputStreamValidationResults);
+ // Errors state
+ const hasErrors = forceShowErrors && validationHasErrors(inputStreamValidationResults);
- const requiredVars: RegistryVarsEntry[] = [];
- const advancedVars: RegistryVarsEntry[] = [];
+ const requiredVars: RegistryVarsEntry[] = [];
+ const advancedVars: RegistryVarsEntry[] = [];
- if (packageInputStream.vars && packageInputStream.vars.length) {
- packageInputStream.vars.forEach((varDef) => {
- if (isAdvancedVar(varDef)) {
- advancedVars.push(varDef);
- } else {
- requiredVars.push(varDef);
- }
- });
- }
+ if (packageInputStream.vars && packageInputStream.vars.length) {
+ packageInputStream.vars.forEach((varDef) => {
+ if (isAdvancedVar(varDef)) {
+ advancedVars.push(varDef);
+ } else {
+ requiredVars.push(varDef);
+ }
+ });
+ }
- return (
-
-
-
-
-
- {packageInputStream.title}
-
-
- {hasErrors ? (
-
-
- }
- position="right"
- type="alert"
- iconProps={{ color: 'danger' }}
- />
-
+ const advancedVarsWithErrorsCount: number = useMemo(
+ () =>
+ advancedVars.filter(
+ ({ name: varName }) => inputStreamValidationResults.vars?.[varName]?.length
+ ).length,
+ [advancedVars, inputStreamValidationResults.vars]
+ );
+
+ return (
+
+
+
+
+
+ {
+ const enabled = e.target.checked;
+ updatePackageConfigInputStream({
+ enabled,
+ });
+ }}
+ />
+ {packageInputStream.description ? (
+
+
+
+
+
+
) : null}
-
- }
- checked={packageConfigInputStream.enabled}
- onChange={(e) => {
- const enabled = e.target.checked;
- updatePackageConfigInputStream({
- enabled,
- });
- }}
- />
- {packageInputStream.description ? (
-
-
-
-
-
-
- ) : null}
-
-
-
- {requiredVars.map((varDef) => {
- const { name: varName, type: varType } = varDef;
- const value = packageConfigInputStream.vars![varName].value;
- return (
-
- {
- updatePackageConfigInputStream({
- vars: {
- ...packageConfigInputStream.vars,
- [varName]: {
- type: varType,
- value: newValue,
+
+
+
+
+
+ {requiredVars.map((varDef) => {
+ const { name: varName, type: varType } = varDef;
+ const value = packageConfigInputStream.vars![varName].value;
+ return (
+
+ {
+ updatePackageConfigInputStream({
+ vars: {
+ ...packageConfigInputStream.vars,
+ [varName]: {
+ type: varType,
+ value: newValue,
+ },
},
- },
- });
- }}
- errors={inputStreamValidationResults.vars![varName]}
- forceShowErrors={forceShowErrors}
- />
-
- );
- })}
- {advancedVars.length ? (
-
-
- {/* Wrapper div to prevent button from going full width */}
-
- setIsShowingAdvanced(!isShowingAdvanced)}
- flush="left"
- >
-
-
-
-
- {isShowingAdvanced
- ? advancedVars.map((varDef) => {
- const { name: varName, type: varType } = varDef;
- const value = packageConfigInputStream.vars![varName].value;
- return (
-
- {
- updatePackageConfigInputStream({
- vars: {
- ...packageConfigInputStream.vars,
- [varName]: {
- type: varType,
- value: newValue,
- },
- },
- });
- }}
- errors={inputStreamValidationResults.vars![varName]}
- forceShowErrors={forceShowErrors}
+ });
+ }}
+ errors={inputStreamValidationResults.vars![varName]}
+ forceShowErrors={forceShowErrors}
+ />
+
+ );
+ })}
+ {advancedVars.length ? (
+
+
+
+
+ setIsShowingAdvanced(!isShowingAdvanced)}
+ flush="left"
+ >
+
+
+
+ {!isShowingAdvanced && hasErrors && advancedVarsWithErrorsCount ? (
+
+
+
+
- );
- })
- : null}
-
- ) : null}
-
-
-
- );
-};
+ ) : null}
+
+
+ {isShowingAdvanced
+ ? advancedVars.map((varDef) => {
+ const { name: varName, type: varType } = varDef;
+ const value = packageConfigInputStream.vars![varName].value;
+ return (
+
+ {
+ updatePackageConfigInputStream({
+ vars: {
+ ...packageConfigInputStream.vars,
+ [varName]: {
+ type: varType,
+ value: newValue,
+ },
+ },
+ });
+ }}
+ errors={inputStreamValidationResults.vars![varName]}
+ forceShowErrors={forceShowErrors}
+ />
+
+ );
+ })
+ : null}
+
+ ) : null}
+
+
+
+ );
+ }
+);
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_var_field.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_var_field.tsx
index 8868e00ecc1f1..eb681096a080e 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_var_field.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/components/package_config_input_var_field.tsx
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState } from 'react';
+import React, { useState, memo, useMemo } from 'react';
import ReactMarkdown from 'react-markdown';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiFormRow, EuiFieldText, EuiComboBox, EuiText, EuiCodeEditor } from '@elastic/eui';
@@ -18,13 +18,13 @@ export const PackageConfigInputVarField: React.FunctionComponent<{
onChange: (newValue: any) => void;
errors?: string[] | null;
forceShowErrors?: boolean;
-}> = ({ varDef, value, onChange, errors: varErrors, forceShowErrors }) => {
+}> = memo(({ varDef, value, onChange, errors: varErrors, forceShowErrors }) => {
const [isDirty, setIsDirty] = useState(false);
const { multi, required, type, title, name, description } = varDef;
const isInvalid = (isDirty || forceShowErrors) && !!varErrors;
const errors = isInvalid ? varErrors : null;
- const renderField = () => {
+ const field = useMemo(() => {
if (multi) {
return (
setIsDirty(true)}
/>
);
- };
+ }, [isInvalid, multi, onChange, type, value]);
return (
}
>
- {renderField()}
+ {field}
);
-};
+});
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/index.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/index.tsx
index b446e6bf97e7b..74cbcdca512db 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/index.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/index.tsx
@@ -5,6 +5,7 @@
*/
import React, { useState, useEffect, useMemo, useCallback, ReactEventHandler } from 'react';
import { useRouteMatch, useHistory } from 'react-router-dom';
+import styled from 'styled-components';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import {
@@ -31,6 +32,7 @@ import {
useConfig,
sendGetAgentStatus,
} from '../../../hooks';
+import { Loading } from '../../../components';
import { ConfirmDeployConfigModal } from '../components';
import { CreatePackageConfigPageLayout } from './components';
import { CreatePackageConfigFrom, PackageConfigFormState } from './types';
@@ -45,6 +47,12 @@ import { StepConfigurePackage } from './step_configure_package';
import { StepDefinePackageConfig } from './step_define_package_config';
import { useIntraAppState } from '../../../hooks/use_intra_app_state';
+const StepsWithLessPadding = styled(EuiSteps)`
+ .euiStep__content {
+ padding-bottom: ${(props) => props.theme.eui.paddingSizes.m};
+ }
+`;
+
export const CreatePackageConfigPage: React.FunctionComponent = () => {
const {
notifications,
@@ -75,6 +83,7 @@ export const CreatePackageConfigPage: React.FunctionComponent = () => {
// Agent config and package info states
const [agentConfig, setAgentConfig] = useState();
const [packageInfo, setPackageInfo] = useState();
+ const [isLoadingSecondStep, setIsLoadingSecondStep] = useState(false);
const agentConfigId = agentConfig?.id;
// Retrieve agent count
@@ -151,40 +160,47 @@ export const CreatePackageConfigPage: React.FunctionComponent = () => {
const hasErrors = validationResults ? validationHasErrors(validationResults) : false;
+ // Update package config validation
+ const updatePackageConfigValidation = useCallback(
+ (newPackageConfig?: NewPackageConfig) => {
+ if (packageInfo) {
+ const newValidationResult = validatePackageConfig(
+ newPackageConfig || packageConfig,
+ packageInfo
+ );
+ setValidationResults(newValidationResult);
+ // eslint-disable-next-line no-console
+ console.debug('Package config validation results', newValidationResult);
+
+ return newValidationResult;
+ }
+ },
+ [packageConfig, packageInfo]
+ );
+
// Update package config method
- const updatePackageConfig = (updatedFields: Partial) => {
- const newPackageConfig = {
- ...packageConfig,
- ...updatedFields,
- };
- setPackageConfig(newPackageConfig);
-
- // eslint-disable-next-line no-console
- console.debug('Package config updated', newPackageConfig);
- const newValidationResults = updatePackageConfigValidation(newPackageConfig);
- const hasPackage = newPackageConfig.package;
- const hasValidationErrors = newValidationResults
- ? validationHasErrors(newValidationResults)
- : false;
- const hasAgentConfig = newPackageConfig.config_id && newPackageConfig.config_id !== '';
- if (hasPackage && hasAgentConfig && !hasValidationErrors) {
- setFormState('VALID');
- }
- };
+ const updatePackageConfig = useCallback(
+ (updatedFields: Partial) => {
+ const newPackageConfig = {
+ ...packageConfig,
+ ...updatedFields,
+ };
+ setPackageConfig(newPackageConfig);
- const updatePackageConfigValidation = (newPackageConfig?: NewPackageConfig) => {
- if (packageInfo) {
- const newValidationResult = validatePackageConfig(
- newPackageConfig || packageConfig,
- packageInfo
- );
- setValidationResults(newValidationResult);
// eslint-disable-next-line no-console
- console.debug('Package config validation results', newValidationResult);
-
- return newValidationResult;
- }
- };
+ console.debug('Package config updated', newPackageConfig);
+ const newValidationResults = updatePackageConfigValidation(newPackageConfig);
+ const hasPackage = newPackageConfig.package;
+ const hasValidationErrors = newValidationResults
+ ? validationHasErrors(newValidationResults)
+ : false;
+ const hasAgentConfig = newPackageConfig.config_id && newPackageConfig.config_id !== '';
+ if (hasPackage && hasAgentConfig && !hasValidationErrors) {
+ setFormState('VALID');
+ }
+ },
+ [packageConfig, updatePackageConfigValidation]
+ );
// Cancel path
const cancelUrl = useMemo(() => {
@@ -276,6 +292,7 @@ export const CreatePackageConfigPage: React.FunctionComponent = () => {
updatePackageInfo={updatePackageInfo}
agentConfig={agentConfig}
updateAgentConfig={updateAgentConfig}
+ setIsLoadingSecondStep={setIsLoadingSecondStep}
/>
),
[pkgkey, updatePackageInfo, agentConfig, updateAgentConfig]
@@ -288,11 +305,47 @@ export const CreatePackageConfigPage: React.FunctionComponent = () => {
updateAgentConfig={updateAgentConfig}
packageInfo={packageInfo}
updatePackageInfo={updatePackageInfo}
+ setIsLoadingSecondStep={setIsLoadingSecondStep}
/>
),
[configId, updateAgentConfig, packageInfo, updatePackageInfo]
);
+ const stepConfigurePackage = useMemo(
+ () =>
+ isLoadingSecondStep ? (
+
+ ) : agentConfig && packageInfo ? (
+ <>
+
+
+ >
+ ) : (
+
+ ),
+ [
+ agentConfig,
+ formState,
+ isLoadingSecondStep,
+ packageConfig,
+ packageInfo,
+ updatePackageConfig,
+ validationResults,
+ ]
+ );
+
const steps: EuiStepProps[] = [
from === 'package'
? {
@@ -310,44 +363,16 @@ export const CreatePackageConfigPage: React.FunctionComponent = () => {
}),
children: stepSelectPackage,
},
- {
- title: i18n.translate(
- 'xpack.ingestManager.createPackageConfig.stepDefinePackageConfigTitle',
- {
- defaultMessage: 'Configure integration',
- }
- ),
- status: !packageInfo || !agentConfig ? 'disabled' : undefined,
- children:
- agentConfig && packageInfo ? (
-
- ) : null,
- },
{
title: i18n.translate(
'xpack.ingestManager.createPackageConfig.stepConfigurePackageConfigTitle',
{
- defaultMessage: 'Select the data you want to collect',
+ defaultMessage: 'Configure integration',
}
),
- status: !packageInfo || !agentConfig ? 'disabled' : undefined,
+ status: !packageInfo || !agentConfig || isLoadingSecondStep ? 'disabled' : undefined,
'data-test-subj': 'dataCollectionSetupStep',
- children:
- agentConfig && packageInfo ? (
-
- ) : null,
+ children: stepConfigurePackage,
},
];
@@ -371,7 +396,7 @@ export const CreatePackageConfigPage: React.FunctionComponent = () => {
: agentConfig && (
)}
-
+
{/* TODO #64541 - Remove classes */}
{
: undefined
}
>
-
+
- {/* eslint-disable-next-line @elastic/eui/href-or-on-click */}
-
+ {!isLoadingSecondStep && agentConfig && packageInfo && formState === 'INVALID' ? (
-
+ ) : null}
-
-
-
+
+
+ {/* eslint-disable-next-line @elastic/eui/href-or-on-click */}
+
+
+
+
+
+
+
+
+
+
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/has_invalid_but_required_var.test.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/has_invalid_but_required_var.test.ts
new file mode 100644
index 0000000000000..679ae4b1456d6
--- /dev/null
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/has_invalid_but_required_var.test.ts
@@ -0,0 +1,94 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import { hasInvalidButRequiredVar } from './has_invalid_but_required_var';
+
+describe('Ingest Manager - hasInvalidButRequiredVar', () => {
+ it('returns true for invalid & required vars', () => {
+ expect(
+ hasInvalidButRequiredVar(
+ [
+ {
+ name: 'mock_var',
+ type: 'text',
+ required: true,
+ },
+ ],
+ {}
+ )
+ ).toBe(true);
+
+ expect(
+ hasInvalidButRequiredVar(
+ [
+ {
+ name: 'mock_var',
+ type: 'text',
+ required: true,
+ },
+ ],
+ {
+ mock_var: {
+ value: undefined,
+ },
+ }
+ )
+ ).toBe(true);
+ });
+
+ it('returns false for valid & required vars', () => {
+ expect(
+ hasInvalidButRequiredVar(
+ [
+ {
+ name: 'mock_var',
+ type: 'text',
+ required: true,
+ },
+ ],
+ {
+ mock_var: {
+ value: 'foo',
+ },
+ }
+ )
+ ).toBe(false);
+ });
+
+ it('returns false for optional vars', () => {
+ expect(
+ hasInvalidButRequiredVar(
+ [
+ {
+ name: 'mock_var',
+ type: 'text',
+ },
+ ],
+ {
+ mock_var: {
+ value: 'foo',
+ },
+ }
+ )
+ ).toBe(false);
+
+ expect(
+ hasInvalidButRequiredVar(
+ [
+ {
+ name: 'mock_var',
+ type: 'text',
+ required: false,
+ },
+ ],
+ {
+ mock_var: {
+ value: undefined,
+ },
+ }
+ )
+ ).toBe(false);
+ });
+});
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/has_invalid_but_required_var.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/has_invalid_but_required_var.ts
new file mode 100644
index 0000000000000..f632d40a05621
--- /dev/null
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/has_invalid_but_required_var.ts
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+import { PackageConfigConfigRecord, RegistryVarsEntry } from '../../../../types';
+import { validatePackageConfigConfig } from './';
+
+export const hasInvalidButRequiredVar = (
+ registryVars?: RegistryVarsEntry[],
+ packageConfigVars?: PackageConfigConfigRecord
+): boolean => {
+ return (
+ (registryVars && !packageConfigVars) ||
+ Boolean(
+ registryVars &&
+ registryVars.find(
+ (registryVar) =>
+ registryVar.required &&
+ (!packageConfigVars ||
+ !packageConfigVars[registryVar.name] ||
+ validatePackageConfigConfig(packageConfigVars[registryVar.name], registryVar)?.length)
+ )
+ )
+ );
+};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/index.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/index.ts
index 6cfb1c74bd661..0d33a4e113f03 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/index.ts
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/index.ts
@@ -4,10 +4,13 @@
* you may not use this file except in compliance with the Elastic License.
*/
export { isAdvancedVar } from './is_advanced_var';
+export { hasInvalidButRequiredVar } from './has_invalid_but_required_var';
export {
PackageConfigValidationResults,
PackageConfigConfigValidationResults,
PackageConfigInputValidationResults,
validatePackageConfig,
+ validatePackageConfigConfig,
validationHasErrors,
+ countValidationErrors,
} from './validate_package_config';
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/is_advanced_var.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/is_advanced_var.ts
index 398f1d675c5df..a2f4a6675ac80 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/is_advanced_var.ts
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/is_advanced_var.ts
@@ -6,7 +6,7 @@
import { RegistryVarsEntry } from '../../../../types';
export const isAdvancedVar = (varDef: RegistryVarsEntry): boolean => {
- if (varDef.show_user || (varDef.required && !varDef.default)) {
+ if (varDef.show_user || (varDef.required && varDef.default === undefined)) {
return false;
}
return true;
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/validate_package_config.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/validate_package_config.ts
index cd301747c3f53..bd9d216ca969a 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/validate_package_config.ts
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/services/validate_package_config.ts
@@ -171,7 +171,7 @@ export const validatePackageConfig = (
return validationResults;
};
-const validatePackageConfigConfig = (
+export const validatePackageConfigConfig = (
configEntry: PackageConfigConfigRecordEntry,
varDef: RegistryVarsEntry
): string[] | null => {
@@ -237,13 +237,22 @@ const validatePackageConfigConfig = (
return errors.length ? errors : null;
};
-export const validationHasErrors = (
+export const countValidationErrors = (
validationResults:
| PackageConfigValidationResults
| PackageConfigInputValidationResults
| PackageConfigConfigValidationResults
-) => {
+): number => {
const flattenedValidation = getFlattenedObject(validationResults);
+ const errors = Object.values(flattenedValidation).filter((value) => Boolean(value)) || [];
+ return errors.length;
+};
- return !!Object.entries(flattenedValidation).find(([, value]) => !!value);
+export const validationHasErrors = (
+ validationResults:
+ | PackageConfigValidationResults
+ | PackageConfigInputValidationResults
+ | PackageConfigConfigValidationResults
+): boolean => {
+ return countValidationErrors(validationResults) > 0;
};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_configure_package.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_configure_package.tsx
index eecd204a5e307..380a03e15695b 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_configure_package.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_configure_package.tsx
@@ -4,12 +4,10 @@
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
-import { FormattedMessage } from '@kbn/i18n/react';
-import { EuiPanel, EuiFlexGroup, EuiFlexItem, EuiSpacer, EuiCallOut } from '@elastic/eui';
-import { i18n } from '@kbn/i18n';
+import { EuiHorizontalRule, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { PackageInfo, RegistryStream, NewPackageConfig, PackageConfigInput } from '../../../types';
import { Loading } from '../../../components';
-import { PackageConfigValidationResults, validationHasErrors } from './services';
+import { PackageConfigValidationResults } from './services';
import { PackageConfigInputPanel, CustomPackageConfig } from './components';
import { CreatePackageConfigFrom } from './types';
@@ -52,8 +50,6 @@ export const StepConfigurePackage: React.FunctionComponent<{
validationResults,
submitAttempted,
}) => {
- const hasErrors = validationResults ? validationHasErrors(validationResults) : false;
-
// Configure inputs (and their streams)
// Assume packages only export one config template for now
const renderConfigureInputs = () =>
@@ -61,76 +57,50 @@ export const StepConfigurePackage: React.FunctionComponent<{
packageInfo.config_templates[0] &&
packageInfo.config_templates[0].inputs &&
packageInfo.config_templates[0].inputs.length ? (
-
- {packageInfo.config_templates[0].inputs.map((packageInput) => {
- const packageConfigInput = packageConfig.inputs.find(
- (input) => input.type === packageInput.type
- );
- const packageInputStreams = findStreamsForInputType(packageInput.type, packageInfo);
- return packageConfigInput ? (
-
- ) => {
- const indexOfUpdatedInput = packageConfig.inputs.findIndex(
- (input) => input.type === packageInput.type
- );
- const newInputs = [...packageConfig.inputs];
- newInputs[indexOfUpdatedInput] = {
- ...newInputs[indexOfUpdatedInput],
- ...updatedInput,
- };
- updatePackageConfig({
- inputs: newInputs,
- });
- }}
- inputValidationResults={validationResults!.inputs![packageConfigInput.type]}
- forceShowErrors={submitAttempted}
- />
-
- ) : null;
- })}
-
+ <>
+
+
+ {packageInfo.config_templates[0].inputs.map((packageInput) => {
+ const packageConfigInput = packageConfig.inputs.find(
+ (input) => input.type === packageInput.type
+ );
+ const packageInputStreams = findStreamsForInputType(packageInput.type, packageInfo);
+ return packageConfigInput ? (
+
+ ) => {
+ const indexOfUpdatedInput = packageConfig.inputs.findIndex(
+ (input) => input.type === packageInput.type
+ );
+ const newInputs = [...packageConfig.inputs];
+ newInputs[indexOfUpdatedInput] = {
+ ...newInputs[indexOfUpdatedInput],
+ ...updatedInput,
+ };
+ updatePackageConfig({
+ inputs: newInputs,
+ });
+ }}
+ inputValidationResults={validationResults!.inputs![packageConfigInput.type]}
+ forceShowErrors={submitAttempted}
+ />
+
+
+ ) : null;
+ })}
+
+ >
) : (
-
-
-
+
);
- return validationResults ? (
-
- {renderConfigureInputs()}
- {hasErrors && submitAttempted ? (
-
-
-
-
-
-
-
-
-
- ) : null}
-
- ) : (
-
- );
+ return validationResults ? renderConfigureInputs() : ;
};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_define_package_config.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_define_package_config.tsx
index b2ffe62104eb1..a04d023ebcc48 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_define_package_config.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_define_package_config.tsx
@@ -3,17 +3,18 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useEffect, useState, Fragment } from 'react';
+import React, { useEffect, useState } from 'react';
import { FormattedMessage } from '@kbn/i18n/react';
import {
- EuiFlexGrid,
- EuiFlexItem,
EuiFormRow,
EuiFieldText,
EuiButtonEmpty,
EuiSpacer,
EuiText,
EuiComboBox,
+ EuiDescribedFormGroup,
+ EuiFlexGroup,
+ EuiFlexItem,
} from '@elastic/eui';
import { AgentConfig, PackageInfo, PackageConfig, NewPackageConfig } from '../../../types';
import { packageToPackageConfigInputs } from '../../../services';
@@ -28,7 +29,7 @@ export const StepDefinePackageConfig: React.FunctionComponent<{
validationResults: PackageConfigValidationResults;
}> = ({ agentConfig, packageInfo, packageConfig, updatePackageConfig, validationResults }) => {
// Form show/hide states
- const [isShowingAdvancedDefine, setIsShowingAdvancedDefine] = useState(false);
+ const [isShowingAdvanced, setIsShowingAdvanced] = useState(false);
// Update package config's package and config info
useEffect(() => {
@@ -74,111 +75,140 @@ export const StepDefinePackageConfig: React.FunctionComponent<{
]);
return validationResults ? (
- <>
-
-
-
+
+
+
+ }
+ description={
+
+ }
+ >
+ <>
+ {/* Name */}
+
+ }
+ >
+
+ updatePackageConfig({
+ name: e.target.value,
+ })
}
- >
-
- updatePackageConfig({
- name: e.target.value,
- })
- }
- data-test-subj="packageConfigNameInput"
+ data-test-subj="packageConfigNameInput"
+ />
+
+
+ {/* Description */}
+
-
-
-
-
+
+ }
+ isInvalid={!!validationResults.description}
+ error={validationResults.description}
+ >
+
+ updatePackageConfig({
+ description: e.target.value,
+ })
}
- labelAppend={
-
+ />
+
+
+
+ {/* Advanced options toggle */}
+
+
+ setIsShowingAdvanced(!isShowingAdvanced)}
+ flush="left"
+ >
+
+
+
+ {!isShowingAdvanced && !!validationResults.namespace ? (
+
+
- }
- isInvalid={!!validationResults.description}
- error={validationResults.description}
- >
-
- updatePackageConfig({
- description: e.target.value,
- })
+
+ ) : null}
+
+
+ {/* Advanced options content */}
+ {/* Todo: Populate list of existing namespaces */}
+ {isShowingAdvanced ? (
+ <>
+
+
}
- />
-
-
-
-
- setIsShowingAdvancedDefine(!isShowingAdvancedDefine)}
- >
-
-
- {/* Todo: Populate list of existing namespaces */}
- {isShowingAdvancedDefine || !!validationResults.namespace ? (
-
-
-
-
-
+ >
+
- {
- updatePackageConfig({
- namespace: newNamespace,
- });
- }}
- onChange={(newNamespaces: Array<{ label: string }>) => {
- updatePackageConfig({
- namespace: newNamespaces.length ? newNamespaces[0].label : '',
- });
- }}
- />
-
-
-
-
- ) : null}
- >
+ onCreateOption={(newNamespace: string) => {
+ updatePackageConfig({
+ namespace: newNamespace,
+ });
+ }}
+ onChange={(newNamespaces: Array<{ label: string }>) => {
+ updatePackageConfig({
+ namespace: newNamespaces.length ? newNamespaces[0].label : '',
+ });
+ }}
+ />
+
+ >
+ ) : null}
+ >
+
) : (
);
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_config.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_config.tsx
index f6391cf1fa456..d3120f9051f45 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_config.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_config.tsx
@@ -6,29 +6,50 @@
import React, { useEffect, useState, Fragment } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
-import { EuiFlexGroup, EuiFlexItem, EuiSelectable, EuiSpacer, EuiTextColor } from '@elastic/eui';
+import {
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiSelectable,
+ EuiSpacer,
+ EuiTextColor,
+ EuiPortal,
+ EuiButtonEmpty,
+} from '@elastic/eui';
import { Error } from '../../../components';
import { AgentConfig, PackageInfo, GetAgentConfigsResponseItem } from '../../../types';
import { isPackageLimited, doesAgentConfigAlreadyIncludePackage } from '../../../services';
-import { useGetPackageInfoByKey, useGetAgentConfigs, sendGetOneAgentConfig } from '../../../hooks';
+import {
+ useGetPackageInfoByKey,
+ useGetAgentConfigs,
+ sendGetOneAgentConfig,
+ useCapabilities,
+} from '../../../hooks';
+import { CreateAgentConfigFlyout } from '../list_page/components';
export const StepSelectConfig: React.FunctionComponent<{
pkgkey: string;
updatePackageInfo: (packageInfo: PackageInfo | undefined) => void;
agentConfig: AgentConfig | undefined;
updateAgentConfig: (config: AgentConfig | undefined) => void;
-}> = ({ pkgkey, updatePackageInfo, agentConfig, updateAgentConfig }) => {
+ setIsLoadingSecondStep: (isLoading: boolean) => void;
+}> = ({ pkgkey, updatePackageInfo, agentConfig, updateAgentConfig, setIsLoadingSecondStep }) => {
// Selected config state
const [selectedConfigId, setSelectedConfigId] = useState(
agentConfig ? agentConfig.id : undefined
);
const [selectedConfigError, setSelectedConfigError] = useState();
+ // Create new config flyout state
+ const hasWriteCapabilites = useCapabilities().write;
+ const [isCreateAgentConfigFlyoutOpen, setIsCreateAgentConfigFlyoutOpen] = useState(
+ false
+ );
+
// Fetch package info
const {
data: packageInfoData,
error: packageInfoError,
- isLoading: packageInfoLoading,
+ isLoading: isPackageInfoLoading,
} = useGetPackageInfoByKey(pkgkey);
const isLimitedPackage = (packageInfoData && isPackageLimited(packageInfoData.response)) || false;
@@ -37,6 +58,7 @@ export const StepSelectConfig: React.FunctionComponent<{
data: agentConfigsData,
error: agentConfigsError,
isLoading: isAgentConfigsLoading,
+ sendRequest: refreshAgentConfigs,
} = useGetAgentConfigs({
page: 1,
perPage: 1000,
@@ -64,6 +86,7 @@ export const StepSelectConfig: React.FunctionComponent<{
useEffect(() => {
const fetchAgentConfigInfo = async () => {
if (selectedConfigId) {
+ setIsLoadingSecondStep(true);
const { data, error } = await sendGetOneAgentConfig(selectedConfigId);
if (error) {
setSelectedConfigError(error);
@@ -76,11 +99,12 @@ export const StepSelectConfig: React.FunctionComponent<{
setSelectedConfigError(undefined);
updateAgentConfig(undefined);
}
+ setIsLoadingSecondStep(false);
};
if (!agentConfig || selectedConfigId !== agentConfig.id) {
fetchAgentConfigInfo();
}
- }, [selectedConfigId, agentConfig, updateAgentConfig]);
+ }, [selectedConfigId, agentConfig, updateAgentConfig, setIsLoadingSecondStep]);
// Display package error if there is one
if (packageInfoError) {
@@ -113,92 +137,125 @@ export const StepSelectConfig: React.FunctionComponent<{
}
return (
-
-
- {
- const alreadyHasLimitedPackage =
- (isLimitedPackage &&
- packageInfoData &&
- doesAgentConfigAlreadyIncludePackage(agentConf, packageInfoData.response.name)) ||
- false;
- return {
- label: agentConf.name,
- key: agentConf.id,
- checked: selectedConfigId === agentConf.id ? 'on' : undefined,
- disabled: alreadyHasLimitedPackage,
- 'data-test-subj': 'agentConfigItem',
- };
- })}
- renderOption={(option) => (
-
- {option.label}
-
-
- {agentConfigsById[option.key!].description}
-
-
-
-
-
-
-
-
- )}
- listProps={{
- bordered: true,
- }}
- searchProps={{
- placeholder: i18n.translate(
- 'xpack.ingestManager.createPackageConfig.StepSelectConfig.filterAgentConfigsInputPlaceholder',
- {
- defaultMessage: 'Search for agent configurations',
+ <>
+ {isCreateAgentConfigFlyoutOpen ? (
+
+ {
+ setIsCreateAgentConfigFlyoutOpen(false);
+ if (newAgentConfig) {
+ refreshAgentConfigs();
+ setSelectedConfigId(newAgentConfig.id);
+ }
+ }}
+ />
+
+ ) : null}
+
+
+ {
+ const alreadyHasLimitedPackage =
+ (isLimitedPackage &&
+ packageInfoData &&
+ doesAgentConfigAlreadyIncludePackage(agentConf, packageInfoData.response.name)) ||
+ false;
+ return {
+ label: agentConf.name,
+ key: agentConf.id,
+ checked: selectedConfigId === agentConf.id ? 'on' : undefined,
+ disabled: alreadyHasLimitedPackage,
+ 'data-test-subj': 'agentConfigItem',
+ };
+ })}
+ renderOption={(option) => (
+
+ {option.label}
+
+
+ {agentConfigsById[option.key!].description}
+
+
+
+
+
+
+
+
+ )}
+ listProps={{
+ bordered: true,
+ }}
+ searchProps={{
+ placeholder: i18n.translate(
+ 'xpack.ingestManager.createPackageConfig.StepSelectConfig.filterAgentConfigsInputPlaceholder',
+ {
+ defaultMessage: 'Search for agent configurations',
+ }
+ ),
+ }}
+ height={180}
+ onChange={(options) => {
+ const selectedOption = options.find((option) => option.checked === 'on');
+ if (selectedOption) {
+ if (selectedOption.key !== selectedConfigId) {
+ setSelectedConfigId(selectedOption.key);
+ }
+ } else {
+ setSelectedConfigId(undefined);
+ }
+ }}
+ >
+ {(list, search) => (
+
+ {search}
+
+ {list}
+
+ )}
+
+
+ {/* Display selected agent config error if there is one */}
+ {selectedConfigError ? (
+
+
}
- ),
- }}
- height={240}
- onChange={(options) => {
- const selectedOption = options.find((option) => option.checked === 'on');
- if (selectedOption) {
- setSelectedConfigId(selectedOption.key);
- } else {
- setSelectedConfigId(undefined);
- }
- }}
- >
- {(list, search) => (
-
- {search}
-
- {list}
-
- )}
-
-
- {/* Display selected agent config error if there is one */}
- {selectedConfigError ? (
+ error={selectedConfigError}
+ />
+
+ ) : null}
-
+ setIsCreateAgentConfigFlyoutOpen(true)}
+ flush="left"
+ size="s"
+ >
- }
- error={selectedConfigError}
- />
+
+
- ) : null}
-
+
+ >
);
};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_package.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_package.tsx
index 204b862bd4dc4..048ae101fcd6f 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_package.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/create_package_config_page/step_select_package.tsx
@@ -22,7 +22,14 @@ export const StepSelectPackage: React.FunctionComponent<{
updateAgentConfig: (config: AgentConfig | undefined) => void;
packageInfo?: PackageInfo;
updatePackageInfo: (packageInfo: PackageInfo | undefined) => void;
-}> = ({ agentConfigId, updateAgentConfig, packageInfo, updatePackageInfo }) => {
+ setIsLoadingSecondStep: (isLoading: boolean) => void;
+}> = ({
+ agentConfigId,
+ updateAgentConfig,
+ packageInfo,
+ updatePackageInfo,
+ setIsLoadingSecondStep,
+}) => {
// Selected package state
const [selectedPkgKey, setSelectedPkgKey] = useState(
packageInfo ? `${packageInfo.name}-${packageInfo.version}` : undefined
@@ -30,7 +37,11 @@ export const StepSelectPackage: React.FunctionComponent<{
const [selectedPkgError, setSelectedPkgError] = useState();
// Fetch agent config info
- const { data: agentConfigData, error: agentConfigError } = useGetOneAgentConfig(agentConfigId);
+ const {
+ data: agentConfigData,
+ error: agentConfigError,
+ isLoading: isAgentConfigsLoading,
+ } = useGetOneAgentConfig(agentConfigId);
// Fetch packages info
// Filter out limited packages already part of selected agent config
@@ -66,6 +77,7 @@ export const StepSelectPackage: React.FunctionComponent<{
useEffect(() => {
const fetchPackageInfo = async () => {
if (selectedPkgKey) {
+ setIsLoadingSecondStep(true);
const { data, error } = await sendGetPackageInfoByKey(selectedPkgKey);
if (error) {
setSelectedPkgError(error);
@@ -74,6 +86,7 @@ export const StepSelectPackage: React.FunctionComponent<{
setSelectedPkgError(undefined);
updatePackageInfo(data.response);
}
+ setIsLoadingSecondStep(false);
} else {
setSelectedPkgError(undefined);
updatePackageInfo(undefined);
@@ -82,7 +95,7 @@ export const StepSelectPackage: React.FunctionComponent<{
if (!packageInfo || selectedPkgKey !== `${packageInfo.name}-${packageInfo.version}`) {
fetchPackageInfo();
}
- }, [selectedPkgKey, packageInfo, updatePackageInfo]);
+ }, [selectedPkgKey, packageInfo, updatePackageInfo, setIsLoadingSecondStep]);
// Display agent config error if there is one
if (agentConfigError) {
@@ -121,7 +134,7 @@ export const StepSelectPackage: React.FunctionComponent<{
searchable
allowExclusions={false}
singleSelection={true}
- isLoading={isPackagesLoading || isLimitedPackagesLoading}
+ isLoading={isPackagesLoading || isLimitedPackagesLoading || isAgentConfigsLoading}
options={packages.map(({ title, name, version, icons }) => {
const pkgkey = `${name}-${version}`;
return {
@@ -154,7 +167,9 @@ export const StepSelectPackage: React.FunctionComponent<{
onChange={(options) => {
const selectedOption = options.find((option) => option.checked === 'on');
if (selectedOption) {
- setSelectedPkgKey(selectedOption.key);
+ if (selectedOption.key !== selectedPkgKey) {
+ setSelectedPkgKey(selectedOption.key);
+ }
} else {
setSelectedPkgKey(undefined);
}
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/edit_package_config_page/index.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/edit_package_config_page/index.tsx
index 52fd95d663671..f4411a6057a15 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/edit_package_config_page/index.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/edit_package_config_page/index.tsx
@@ -3,14 +3,13 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState, useEffect } from 'react';
+import React, { useState, useEffect, useCallback, useMemo } from 'react';
import { useRouteMatch, useHistory } from 'react-router-dom';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import {
EuiButtonEmpty,
EuiButton,
- EuiSteps,
EuiBottomBar,
EuiFlexGroup,
EuiFlexItem,
@@ -160,38 +159,45 @@ export const EditPackageConfigPage: React.FunctionComponent = () => {
const [validationResults, setValidationResults] = useState();
const hasErrors = validationResults ? validationHasErrors(validationResults) : false;
- // Update package config method
- const updatePackageConfig = (updatedFields: Partial) => {
- const newPackageConfig = {
- ...packageConfig,
- ...updatedFields,
- };
- setPackageConfig(newPackageConfig);
+ // Update package config validation
+ const updatePackageConfigValidation = useCallback(
+ (newPackageConfig?: UpdatePackageConfig) => {
+ if (packageInfo) {
+ const newValidationResult = validatePackageConfig(
+ newPackageConfig || packageConfig,
+ packageInfo
+ );
+ setValidationResults(newValidationResult);
+ // eslint-disable-next-line no-console
+ console.debug('Package config validation results', newValidationResult);
- // eslint-disable-next-line no-console
- console.debug('Package config updated', newPackageConfig);
- const newValidationResults = updatePackageConfigValidation(newPackageConfig);
- const hasValidationErrors = newValidationResults
- ? validationHasErrors(newValidationResults)
- : false;
- if (!hasValidationErrors) {
- setFormState('VALID');
- }
- };
+ return newValidationResult;
+ }
+ },
+ [packageConfig, packageInfo]
+ );
- const updatePackageConfigValidation = (newPackageConfig?: UpdatePackageConfig) => {
- if (packageInfo) {
- const newValidationResult = validatePackageConfig(
- newPackageConfig || packageConfig,
- packageInfo
- );
- setValidationResults(newValidationResult);
- // eslint-disable-next-line no-console
- console.debug('Package config validation results', newValidationResult);
+ // Update package config method
+ const updatePackageConfig = useCallback(
+ (updatedFields: Partial) => {
+ const newPackageConfig = {
+ ...packageConfig,
+ ...updatedFields,
+ };
+ setPackageConfig(newPackageConfig);
- return newValidationResult;
- }
- };
+ // eslint-disable-next-line no-console
+ console.debug('Package config updated', newPackageConfig);
+ const newValidationResults = updatePackageConfigValidation(newPackageConfig);
+ const hasValidationErrors = newValidationResults
+ ? validationHasErrors(newValidationResults)
+ : false;
+ if (!hasValidationErrors) {
+ setFormState('VALID');
+ }
+ },
+ [packageConfig, updatePackageConfigValidation]
+ );
// Cancel url
const cancelUrl = getHref('configuration_details', { configId });
@@ -271,6 +277,40 @@ export const EditPackageConfigPage: React.FunctionComponent = () => {
packageInfo,
};
+ const configurePackage = useMemo(
+ () =>
+ agentConfig && packageInfo ? (
+ <>
+
+
+
+ >
+ ) : null,
+ [
+ agentConfig,
+ formState,
+ packageConfig,
+ packageConfigId,
+ packageInfo,
+ updatePackageConfig,
+ validationResults,
+ ]
+ );
+
return (
{isLoadingData ? (
@@ -301,46 +341,7 @@ export const EditPackageConfigPage: React.FunctionComponent = () => {
onCancel={() => setFormState('VALID')}
/>
)}
-
- ),
- },
- {
- title: i18n.translate(
- 'xpack.ingestManager.editPackageConfig.stepConfigurePackageConfigTitle',
- {
- defaultMessage: 'Select the data you want to collect',
- }
- ),
- children: (
-
- ),
- },
- ]}
- />
+ {configurePackage}
{/* TODO #64541 - Remove classes */}
{
: undefined
}
>
-
+
-
+ {agentConfig && packageInfo && formState === 'INVALID' ? (
-
+ ) : null}
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/list_page/components/create_config.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/list_page/components/create_config.tsx
index d1abd88adba86..795c46ec282c5 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/list_page/components/create_config.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/agent_config/list_page/components/create_config.tsx
@@ -18,12 +18,12 @@ import {
EuiButton,
EuiText,
} from '@elastic/eui';
-import { NewAgentConfig } from '../../../../types';
+import { NewAgentConfig, AgentConfig } from '../../../../types';
import { useCapabilities, useCore, sendCreateAgentConfig } from '../../../../hooks';
import { AgentConfigForm, agentConfigFormValidation } from '../../components';
interface Props {
- onClose: () => void;
+ onClose: (createdAgentConfig?: AgentConfig) => void;
}
export const CreateAgentConfigFlyout: React.FunctionComponent = ({ onClose }) => {
@@ -86,7 +86,7 @@ export const CreateAgentConfigFlyout: React.FunctionComponent = ({ onClos
-
+ onClose()} flush="left">
= ({ onClos
}
)
);
- onClose();
+ onClose(data.item);
} else {
notifications.toasts.addDanger(
error
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/config_selection.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/config_selection.tsx
index 8cd337586d1bc..6f53a237187e5 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/config_selection.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/config_selection.tsx
@@ -4,46 +4,91 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import React, { useState } from 'react';
+import React, { useState, useEffect } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { EuiSelect, EuiSpacer, EuiText, EuiButtonEmpty } from '@elastic/eui';
-import { AgentConfig } from '../../../../types';
-import { useGetEnrollmentAPIKeys } from '../../../../hooks';
+import { AgentConfig, GetEnrollmentAPIKeysResponse } from '../../../../types';
+import { sendGetEnrollmentAPIKeys, useCore } from '../../../../hooks';
import { AgentConfigPackageBadges } from '../agent_config_package_badges';
-interface Props {
+type Props = {
agentConfigs: AgentConfig[];
- onKeyChange: (key: string) => void;
-}
+ onConfigChange?: (key: string) => void;
+} & (
+ | {
+ withKeySelection: true;
+ onKeyChange?: (key: string) => void;
+ }
+ | {
+ withKeySelection: false;
+ }
+);
-export const EnrollmentStepAgentConfig: React.FC = ({ agentConfigs, onKeyChange }) => {
- const [isAuthenticationSettingsOpen, setIsAuthenticationSettingsOpen] = useState(false);
- const enrollmentAPIKeysRequest = useGetEnrollmentAPIKeys({
- page: 1,
- perPage: 1000,
- });
+export const EnrollmentStepAgentConfig: React.FC = (props) => {
+ const { notifications } = useCore();
+ const { withKeySelection, agentConfigs, onConfigChange } = props;
+ const onKeyChange = props.withKeySelection && props.onKeyChange;
+ const [isAuthenticationSettingsOpen, setIsAuthenticationSettingsOpen] = useState(false);
+ const [enrollmentAPIKeys, setEnrollmentAPIKeys] = useState(
+ []
+ );
const [selectedState, setSelectedState] = useState<{
agentConfigId?: string;
enrollmentAPIKeyId?: string;
}>({
agentConfigId: agentConfigs.length ? agentConfigs[0].id : undefined,
});
- const filteredEnrollmentAPIKeys = React.useMemo(() => {
- if (!selectedState.agentConfigId || !enrollmentAPIKeysRequest.data) {
- return [];
+
+ useEffect(() => {
+ if (onConfigChange && selectedState.agentConfigId) {
+ onConfigChange(selectedState.agentConfigId);
}
+ }, [selectedState.agentConfigId, onConfigChange]);
- return enrollmentAPIKeysRequest.data.list.filter(
- (key) => key.config_id === selectedState.agentConfigId
- );
- }, [enrollmentAPIKeysRequest.data, selectedState.agentConfigId]);
+ useEffect(() => {
+ if (!withKeySelection) {
+ return;
+ }
+ if (!selectedState.agentConfigId) {
+ setEnrollmentAPIKeys([]);
+ return;
+ }
+
+ async function fetchEnrollmentAPIKeys() {
+ try {
+ const res = await sendGetEnrollmentAPIKeys({
+ page: 1,
+ perPage: 10000,
+ });
+ if (res.error) {
+ throw res.error;
+ }
+
+ if (!res.data) {
+ throw new Error('No data while fetching enrollment API keys');
+ }
+
+ setEnrollmentAPIKeys(
+ res.data.list.filter((key) => key.config_id === selectedState.agentConfigId)
+ );
+ } catch (error) {
+ notifications.toasts.addError(error, {
+ title: 'Error',
+ });
+ }
+ }
+ fetchEnrollmentAPIKeys();
+ }, [withKeySelection, selectedState.agentConfigId, notifications.toasts]);
// Select first API key when config change
React.useEffect(() => {
- if (!selectedState.enrollmentAPIKeyId && filteredEnrollmentAPIKeys.length > 0) {
- const enrollmentAPIKeyId = filteredEnrollmentAPIKeys[0].id;
+ if (!withKeySelection || !onKeyChange) {
+ return;
+ }
+ if (!selectedState.enrollmentAPIKeyId && enrollmentAPIKeys.length > 0) {
+ const enrollmentAPIKeyId = enrollmentAPIKeys[0].id;
setSelectedState({
agentConfigId: selectedState.agentConfigId,
enrollmentAPIKeyId,
@@ -51,7 +96,7 @@ export const EnrollmentStepAgentConfig: React.FC = ({ agentConfigs, onKey
onKeyChange(enrollmentAPIKeyId);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
- }, [filteredEnrollmentAPIKeys, selectedState.enrollmentAPIKeyId, selectedState.agentConfigId]);
+ }, [enrollmentAPIKeys, selectedState.enrollmentAPIKeyId, selectedState.agentConfigId]);
return (
<>
@@ -85,43 +130,47 @@ export const EnrollmentStepAgentConfig: React.FC = ({ agentConfigs, onKey
{selectedState.agentConfigId && (
)}
-
- setIsAuthenticationSettingsOpen(!isAuthenticationSettingsOpen)}
- >
-
-
- {isAuthenticationSettingsOpen && (
+ {withKeySelection && onKeyChange && (
<>
- ({
- value: key.id,
- text: key.name,
- }))}
- value={selectedState.enrollmentAPIKeyId || undefined}
- prepend={
-
-
-
- }
- onChange={(e) => {
- setSelectedState({
- ...selectedState,
- enrollmentAPIKeyId: e.target.value,
- });
- onKeyChange(e.target.value);
- }}
- />
+ setIsAuthenticationSettingsOpen(!isAuthenticationSettingsOpen)}
+ >
+
+
+ {isAuthenticationSettingsOpen && (
+ <>
+
+ ({
+ value: key.id,
+ text: key.name,
+ }))}
+ value={selectedState.enrollmentAPIKeyId || undefined}
+ prepend={
+
+
+
+ }
+ onChange={(e) => {
+ setSelectedState({
+ ...selectedState,
+ enrollmentAPIKeyId: e.target.value,
+ });
+ onKeyChange(e.target.value);
+ }}
+ />
+ >
+ )}
>
)}
>
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/index.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/index.tsx
index 43173124d6bae..5a9d3b7efe1bb 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/index.tsx
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/index.tsx
@@ -14,23 +14,13 @@ import {
EuiButtonEmpty,
EuiButton,
EuiFlyoutFooter,
- EuiSteps,
- EuiText,
- EuiLink,
+ EuiTab,
+ EuiTabs,
} from '@elastic/eui';
-import { EuiContainedStepProps } from '@elastic/eui/src/components/steps/steps';
-import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { AgentConfig } from '../../../../types';
-import { EnrollmentStepAgentConfig } from './config_selection';
-import {
- useGetOneEnrollmentAPIKey,
- useCore,
- useGetSettings,
- useLink,
- useFleetStatus,
-} from '../../../../hooks';
-import { ManualInstructions } from '../../../../components/enrollment_instructions';
+import { ManagedInstructions } from './managed_instructions';
+import { StandaloneInstructions } from './standalone_instructions';
interface Props {
onClose: () => void;
@@ -41,99 +31,40 @@ export const AgentEnrollmentFlyout: React.FunctionComponent = ({
onClose,
agentConfigs = [],
}) => {
- const { getHref } = useLink();
- const core = useCore();
- const fleetStatus = useFleetStatus();
-
- const [selectedAPIKeyId, setSelectedAPIKeyId] = useState();
-
- const settings = useGetSettings();
- const apiKey = useGetOneEnrollmentAPIKey(selectedAPIKeyId);
-
- const kibanaUrl =
- settings.data?.item?.kibana_url ?? `${window.location.origin}${core.http.basePath.get()}`;
- const kibanaCASha256 = settings.data?.item?.kibana_ca_sha256;
-
- const steps: EuiContainedStepProps[] = [
- {
- title: i18n.translate('xpack.ingestManager.agentEnrollment.stepDownloadAgentTitle', {
- defaultMessage: 'Download the Elastic Agent',
- }),
- children: (
-
-
-
-
- ),
- }}
- />
-
- ),
- },
- {
- title: i18n.translate('xpack.ingestManager.agentEnrollment.stepChooseAgentConfigTitle', {
- defaultMessage: 'Choose an agent configuration',
- }),
- children: (
-
- ),
- },
- {
- title: i18n.translate('xpack.ingestManager.agentEnrollment.stepRunAgentTitle', {
- defaultMessage: 'Enroll and run the Elastic Agent',
- }),
- children: apiKey.data && (
-
- ),
- },
- ];
+ const [mode, setMode] = useState<'managed' | 'standalone'>('managed');
return (
-
+
+
+ setMode('managed')}>
+
+
+ setMode('standalone')}>
+
+
+
+
- {fleetStatus.isReady ? (
- <>
-
- >
+ {mode === 'managed' ? (
+
) : (
- <>
-
-
-
- ),
- }}
- />
- >
+
)}
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/managed_instructions.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/managed_instructions.tsx
new file mode 100644
index 0000000000000..aabbd37e809a8
--- /dev/null
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/managed_instructions.tsx
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React, { useState } from 'react';
+import { EuiSteps, EuiLink, EuiText, EuiSpacer } from '@elastic/eui';
+import { EuiContainedStepProps } from '@elastic/eui/src/components/steps/steps';
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n/react';
+import { AgentConfig } from '../../../../types';
+import {
+ useGetOneEnrollmentAPIKey,
+ useCore,
+ useGetSettings,
+ useLink,
+ useFleetStatus,
+} from '../../../../hooks';
+import { ManualInstructions } from '../../../../components/enrollment_instructions';
+import { DownloadStep, AgentConfigSelectionStep } from './steps';
+
+interface Props {
+ agentConfigs: AgentConfig[];
+}
+
+export const ManagedInstructions: React.FunctionComponent = ({ agentConfigs = [] }) => {
+ const { getHref } = useLink();
+ const core = useCore();
+ const fleetStatus = useFleetStatus();
+
+ const [selectedAPIKeyId, setSelectedAPIKeyId] = useState();
+
+ const settings = useGetSettings();
+ const apiKey = useGetOneEnrollmentAPIKey(selectedAPIKeyId);
+
+ const kibanaUrl =
+ settings.data?.item?.kibana_url ?? `${window.location.origin}${core.http.basePath.get()}`;
+ const kibanaCASha256 = settings.data?.item?.kibana_ca_sha256;
+
+ const steps: EuiContainedStepProps[] = [
+ DownloadStep(),
+ AgentConfigSelectionStep({ agentConfigs, setSelectedAPIKeyId }),
+ {
+ title: i18n.translate('xpack.ingestManager.agentEnrollment.stepEnrollAndRunAgentTitle', {
+ defaultMessage: 'Enroll and start the Elastic Agent',
+ }),
+ children: apiKey.data && (
+
+ ),
+ },
+ ];
+
+ return (
+ <>
+
+
+
+
+ {fleetStatus.isReady ? (
+ <>
+
+ >
+ ) : (
+ <>
+
+
+
+ ),
+ }}
+ />
+ >
+ )}{' '}
+ >
+ );
+};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/standalone_instructions.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/standalone_instructions.tsx
new file mode 100644
index 0000000000000..27f64059deb84
--- /dev/null
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/standalone_instructions.tsx
@@ -0,0 +1,181 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React, { useState, useEffect, useMemo } from 'react';
+import {
+ EuiSteps,
+ EuiText,
+ EuiSpacer,
+ EuiButton,
+ EuiCode,
+ EuiFlexItem,
+ EuiFlexGroup,
+ EuiCodeBlock,
+ EuiCopy,
+} from '@elastic/eui';
+import { EuiContainedStepProps } from '@elastic/eui/src/components/steps/steps';
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n/react';
+import { AgentConfig } from '../../../../types';
+import { useCore, sendGetOneAgentConfigFull } from '../../../../hooks';
+import { DownloadStep, AgentConfigSelectionStep } from './steps';
+import { configToYaml, agentConfigRouteService } from '../../../../services';
+
+interface Props {
+ agentConfigs: AgentConfig[];
+}
+
+const RUN_INSTRUCTIONS = './elastic-agent run';
+
+export const StandaloneInstructions: React.FunctionComponent = ({ agentConfigs = [] }) => {
+ const core = useCore();
+ const { notifications } = core;
+
+ const [selectedConfigId, setSelectedConfigId] = useState();
+ const [fullAgentConfig, setFullAgentConfig] = useState();
+
+ const downloadLink = selectedConfigId
+ ? core.http.basePath.prepend(
+ `${agentConfigRouteService.getInfoFullDownloadPath(selectedConfigId)}?standalone=true`
+ )
+ : undefined;
+
+ useEffect(() => {
+ async function fetchFullConfig() {
+ try {
+ if (!selectedConfigId) {
+ return;
+ }
+ const res = await sendGetOneAgentConfigFull(selectedConfigId, { standalone: true });
+ if (res.error) {
+ throw res.error;
+ }
+
+ if (!res.data) {
+ throw new Error('No data while fetching full agent config');
+ }
+
+ setFullAgentConfig(res.data.item);
+ } catch (error) {
+ notifications.toasts.addError(error, {
+ title: 'Error',
+ });
+ }
+ }
+ fetchFullConfig();
+ }, [selectedConfigId, notifications.toasts]);
+
+ const yaml = useMemo(() => configToYaml(fullAgentConfig), [fullAgentConfig]);
+ const steps: EuiContainedStepProps[] = [
+ DownloadStep(),
+ AgentConfigSelectionStep({ agentConfigs, setSelectedConfigId }),
+ {
+ title: i18n.translate('xpack.ingestManager.agentEnrollment.stepConfigureAgentTitle', {
+ defaultMessage: 'Configure the agent',
+ }),
+ children: (
+ <>
+
+ elastic-agent.yml,
+ ESUsernameVariable: ES_USERNAME,
+ ESPasswordVariable: ES_PASSWORD,
+ outputSection: outputs,
+ }}
+ />
+
+
+
+
+ {(copy) => (
+
+
+
+ )}
+
+
+
+
+
+
+
+
+
+
+ {yaml}
+
+
+ >
+ ),
+ },
+ {
+ title: i18n.translate('xpack.ingestManager.agentEnrollment.stepRunAgentTitle', {
+ defaultMessage: 'Start the agent',
+ }),
+ children: (
+ <>
+
+
+
+ {RUN_INSTRUCTIONS}
+
+
+ {(copy) => (
+
+
+
+ )}
+
+
+ >
+ ),
+ },
+ {
+ title: i18n.translate('xpack.ingestManager.agentEnrollment.stepCheckForDataTitle', {
+ defaultMessage: 'Check for data',
+ }),
+ status: 'incomplete',
+ children: (
+ <>
+
+
+
+ >
+ ),
+ },
+ ];
+
+ return (
+ <>
+
+
+
+
+
+ >
+ );
+};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/steps.tsx b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/steps.tsx
new file mode 100644
index 0000000000000..267f9027a094a
--- /dev/null
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/sections/fleet/components/agent_enrollment_flyout/steps.tsx
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React from 'react';
+import { EuiText, EuiButton, EuiSpacer } from '@elastic/eui';
+import { FormattedMessage } from '@kbn/i18n/react';
+import { i18n } from '@kbn/i18n';
+import { EnrollmentStepAgentConfig } from './config_selection';
+import { AgentConfig } from '../../../../types';
+
+export const DownloadStep = () => {
+ return {
+ title: i18n.translate('xpack.ingestManager.agentEnrollment.stepDownloadAgentTitle', {
+ defaultMessage: 'Download the Elastic Agent',
+ }),
+ children: (
+ <>
+
+
+
+
+
+
+
+ >
+ ),
+ };
+};
+
+export const AgentConfigSelectionStep = ({
+ agentConfigs,
+ setSelectedAPIKeyId,
+ setSelectedConfigId,
+}: {
+ agentConfigs: AgentConfig[];
+ setSelectedAPIKeyId?: (key: string) => void;
+ setSelectedConfigId?: (configId: string) => void;
+}) => {
+ return {
+ title: i18n.translate('xpack.ingestManager.agentEnrollment.stepChooseAgentConfigTitle', {
+ defaultMessage: 'Choose an agent configuration',
+ }),
+ children: (
+
+ ),
+ };
+};
diff --git a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/types/index.ts b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/types/index.ts
index 170a9cedc08d9..dc27da18bc008 100644
--- a/x-pack/plugins/ingest_manager/public/applications/ingest_manager/types/index.ts
+++ b/x-pack/plugins/ingest_manager/public/applications/ingest_manager/types/index.ts
@@ -18,6 +18,7 @@ export {
UpdatePackageConfig,
PackageConfigInput,
PackageConfigInputStream,
+ PackageConfigConfigRecord,
PackageConfigConfigRecordEntry,
Output,
DataStream,
diff --git a/x-pack/plugins/ingest_manager/server/routes/agent_config/handlers.ts b/x-pack/plugins/ingest_manager/server/routes/agent_config/handlers.ts
index 110f6b9950829..2aaf889296bd6 100644
--- a/x-pack/plugins/ingest_manager/server/routes/agent_config/handlers.ts
+++ b/x-pack/plugins/ingest_manager/server/routes/agent_config/handlers.ts
@@ -232,15 +232,17 @@ export const deleteAgentConfigsHandler: RequestHandler<
}
};
-export const getFullAgentConfig: RequestHandler> = async (context, request, response) => {
+export const getFullAgentConfig: RequestHandler<
+ TypeOf,
+ TypeOf
+> = async (context, request, response) => {
const soClient = context.core.savedObjects.client;
try {
const fullAgentConfig = await agentConfigService.getFullConfig(
soClient,
- request.params.agentConfigId
+ request.params.agentConfigId,
+ { standalone: request.query.standalone === true }
);
if (fullAgentConfig) {
const body: GetFullAgentConfigResponse = {
@@ -264,16 +266,19 @@ export const getFullAgentConfig: RequestHandler> = async (context, request, response) => {
+export const downloadFullAgentConfig: RequestHandler<
+ TypeOf,
+ TypeOf
+> = async (context, request, response) => {
const soClient = context.core.savedObjects.client;
const {
params: { agentConfigId },
} = request;
try {
- const fullAgentConfig = await agentConfigService.getFullConfig(soClient, agentConfigId);
+ const fullAgentConfig = await agentConfigService.getFullConfig(soClient, agentConfigId, {
+ standalone: request.query.standalone === true,
+ });
if (fullAgentConfig) {
const body = configToYaml(fullAgentConfig);
const headers: ResponseHeaders = {
diff --git a/x-pack/plugins/ingest_manager/server/services/agent_config.ts b/x-pack/plugins/ingest_manager/server/services/agent_config.ts
index fe247d5b91db0..5f98c8881388d 100644
--- a/x-pack/plugins/ingest_manager/server/services/agent_config.ts
+++ b/x-pack/plugins/ingest_manager/server/services/agent_config.ts
@@ -365,7 +365,8 @@ class AgentConfigService {
public async getFullConfig(
soClient: SavedObjectsClientContract,
- id: string
+ id: string,
+ options?: { standalone: boolean }
): Promise {
let config;
@@ -400,6 +401,13 @@ class AgentConfigService {
api_key,
...outputConfig,
};
+
+ if (options?.standalone) {
+ delete outputs[name].api_key;
+ outputs[name].username = 'ES_USERNAME';
+ outputs[name].password = 'ES_PASSWORD';
+ }
+
return outputs;
},
{} as FullAgentConfig['outputs']
diff --git a/x-pack/plugins/ingest_manager/server/services/package_config.ts b/x-pack/plugins/ingest_manager/server/services/package_config.ts
index 9433a81e74b07..e8ca09a83c2b6 100644
--- a/x-pack/plugins/ingest_manager/server/services/package_config.ts
+++ b/x-pack/plugins/ingest_manager/server/services/package_config.ts
@@ -44,6 +44,20 @@ class PackageConfigService {
packageConfig: NewPackageConfig,
options?: { id?: string; user?: AuthenticatedUser }
): Promise {
+ // Check that its agent config does not have a package config with the same name
+ const parentAgentConfig = await agentConfigService.get(soClient, packageConfig.config_id);
+ if (!parentAgentConfig) {
+ throw new Error('Agent config not found');
+ } else {
+ if (
+ (parentAgentConfig.package_configs as PackageConfig[]).find(
+ (siblingPackageConfig) => siblingPackageConfig.name === packageConfig.name
+ )
+ ) {
+ throw new Error('There is already a package with the same name on this agent config');
+ }
+ }
+
// Make sure the associated package is installed
if (packageConfig.package?.name) {
const [, pkgInfo] = await Promise.all([
@@ -225,6 +239,21 @@ class PackageConfigService {
throw new Error('Package config not found');
}
+ // Check that its agent config does not have a package config with the same name
+ const parentAgentConfig = await agentConfigService.get(soClient, packageConfig.config_id);
+ if (!parentAgentConfig) {
+ throw new Error('Agent config not found');
+ } else {
+ if (
+ (parentAgentConfig.package_configs as PackageConfig[]).find(
+ (siblingPackageConfig) =>
+ siblingPackageConfig.id !== id && siblingPackageConfig.name === packageConfig.name
+ )
+ ) {
+ throw new Error('There is already a package with the same name on this agent config');
+ }
+ }
+
await soClient.update(
SAVED_OBJECT_TYPE,
id,
diff --git a/x-pack/plugins/ingest_manager/server/services/setup.ts b/x-pack/plugins/ingest_manager/server/services/setup.ts
index e27a5456a5a7d..627abc158143d 100644
--- a/x-pack/plugins/ingest_manager/server/services/setup.ts
+++ b/x-pack/plugins/ingest_manager/server/services/setup.ts
@@ -180,11 +180,18 @@ export async function setupFleet(
fleet_enroll_password: password,
});
- // Generate default enrollment key
- await generateEnrollmentAPIKey(soClient, {
- name: 'Default',
- configId: await agentConfigService.getDefaultAgentConfigId(soClient),
+ const { items: agentConfigs } = await agentConfigService.list(soClient, {
+ perPage: 10000,
});
+
+ await Promise.all(
+ agentConfigs.map((agentConfig) => {
+ return generateEnrollmentAPIKey(soClient, {
+ name: `Default`,
+ configId: agentConfig.id,
+ });
+ })
+ );
}
function generateRandomPassword() {
diff --git a/x-pack/plugins/ingest_manager/server/types/rest_spec/agent_config.ts b/x-pack/plugins/ingest_manager/server/types/rest_spec/agent_config.ts
index d076a803f4b53..594bd141459c1 100644
--- a/x-pack/plugins/ingest_manager/server/types/rest_spec/agent_config.ts
+++ b/x-pack/plugins/ingest_manager/server/types/rest_spec/agent_config.ts
@@ -51,5 +51,6 @@ export const GetFullAgentConfigRequestSchema = {
}),
query: schema.object({
download: schema.maybe(schema.boolean()),
+ standalone: schema.maybe(schema.boolean()),
}),
};
diff --git a/x-pack/plugins/lens/public/visualization_container.scss b/x-pack/plugins/lens/public/visualization_container.scss
new file mode 100644
index 0000000000000..e5c359112fe4b
--- /dev/null
+++ b/x-pack/plugins/lens/public/visualization_container.scss
@@ -0,0 +1,3 @@
+.lnsVisualizationContainer {
+ overflow: auto;
+}
\ No newline at end of file
diff --git a/x-pack/plugins/lens/public/visualization_container.test.tsx b/x-pack/plugins/lens/public/visualization_container.test.tsx
index b29f0a5d783f9..454399ec90121 100644
--- a/x-pack/plugins/lens/public/visualization_container.test.tsx
+++ b/x-pack/plugins/lens/public/visualization_container.test.tsx
@@ -60,4 +60,13 @@ describe('VisualizationContainer', () => {
expect(reportingEl.prop('style')).toEqual({ color: 'blue' });
});
+
+ test('combines class names with container class', () => {
+ const component = mount(
+ Hello!
+ );
+ const reportingEl = component.find('[data-shared-item]').first();
+
+ expect(reportingEl.prop('className')).toEqual('myClass lnsVisualizationContainer');
+ });
});
diff --git a/x-pack/plugins/lens/public/visualization_container.tsx b/x-pack/plugins/lens/public/visualization_container.tsx
index fb7a1268192a8..3ca8d5de932d7 100644
--- a/x-pack/plugins/lens/public/visualization_container.tsx
+++ b/x-pack/plugins/lens/public/visualization_container.tsx
@@ -5,6 +5,9 @@
*/
import React from 'react';
+import classNames from 'classnames';
+
+import './visualization_container.scss';
interface Props extends React.HTMLAttributes {
isReady?: boolean;
@@ -15,9 +18,21 @@ interface Props extends React.HTMLAttributes {
* This is a convenience component that wraps rendered Lens visualizations. It adds reporting
* attributes (data-shared-item, data-render-complete, and data-title).
*/
-export function VisualizationContainer({ isReady = true, reportTitle, children, ...rest }: Props) {
+export function VisualizationContainer({
+ isReady = true,
+ reportTitle,
+ children,
+ className,
+ ...rest
+}: Props) {
return (
-
+
{children}
);
diff --git a/x-pack/plugins/maps/common/descriptor_types/data_request_descriptor_types.ts b/x-pack/plugins/maps/common/descriptor_types/data_request_descriptor_types.ts
index c7bfe94742bd6..1bd8c5401eb1d 100644
--- a/x-pack/plugins/maps/common/descriptor_types/data_request_descriptor_types.ts
+++ b/x-pack/plugins/maps/common/descriptor_types/data_request_descriptor_types.ts
@@ -5,7 +5,7 @@
*/
/* eslint-disable @typescript-eslint/consistent-type-definitions */
-import { RENDER_AS, SORT_ORDER, SCALING_TYPES } from '../constants';
+import { RENDER_AS, SORT_ORDER, SCALING_TYPES, SOURCE_TYPES } from '../constants';
import { MapExtent, MapQuery } from './map_descriptor';
import { Filter, TimeRange } from '../../../../../src/plugins/data/common';
@@ -26,10 +26,12 @@ type ESSearchSourceSyncMeta = {
scalingType: SCALING_TYPES;
topHitsSplitField: string;
topHitsSize: number;
+ sourceType: SOURCE_TYPES.ES_SEARCH;
};
type ESGeoGridSourceSyncMeta = {
requestType: RENDER_AS;
+ sourceType: SOURCE_TYPES.ES_GEO_GRID;
};
export type VectorSourceSyncMeta = ESSearchSourceSyncMeta | ESGeoGridSourceSyncMeta | null;
@@ -51,7 +53,6 @@ export type VectorStyleRequestMeta = MapFilters & {
export type ESSearchSourceResponseMeta = {
areResultsTrimmed?: boolean;
- sourceType?: string;
// top hits meta
areEntitiesTrimmed?: boolean;
diff --git a/x-pack/plugins/maps/common/descriptor_types/sources.ts b/x-pack/plugins/maps/common/descriptor_types/sources.ts
index e32b5f44c8272..7eda37bf53351 100644
--- a/x-pack/plugins/maps/common/descriptor_types/sources.ts
+++ b/x-pack/plugins/maps/common/descriptor_types/sources.ts
@@ -77,8 +77,8 @@ export type ESPewPewSourceDescriptor = AbstractESAggSourceDescriptor & {
};
export type ESTermSourceDescriptor = AbstractESAggSourceDescriptor & {
- indexPatternTitle: string;
- term: string; // term field name
+ indexPatternTitle?: string;
+ term?: string; // term field name
whereQuery?: Query;
};
@@ -138,7 +138,7 @@ export type GeojsonFileSourceDescriptor = {
};
export type JoinDescriptor = {
- leftField: string;
+ leftField?: string;
right: ESTermSourceDescriptor;
};
diff --git a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts
index 551e20fc5ceb5..26a0ffc1b1a37 100644
--- a/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts
+++ b/x-pack/plugins/maps/public/classes/layers/blended_vector_layer/blended_vector_layer.ts
@@ -126,7 +126,7 @@ function getClusterStyleDescriptor(
),
}
: undefined;
- // @ts-ignore
+ // @ts-expect-error
clusterStyleDescriptor.properties[styleName] = {
type: STYLE_TYPE.DYNAMIC,
options: {
@@ -136,7 +136,7 @@ function getClusterStyleDescriptor(
};
} else {
// copy static styles to cluster style
- // @ts-ignore
+ // @ts-expect-error
clusterStyleDescriptor.properties[styleName] = {
type: STYLE_TYPE.STATIC,
options: { ...styleProperty.getOptions() },
@@ -192,8 +192,8 @@ export class BlendedVectorLayer extends VectorLayer implements IVectorLayer {
const requestMeta = sourceDataRequest.getMeta();
if (
requestMeta &&
- requestMeta.sourceType &&
- requestMeta.sourceType === SOURCE_TYPES.ES_GEO_GRID
+ requestMeta.sourceMeta &&
+ requestMeta.sourceMeta.sourceType === SOURCE_TYPES.ES_GEO_GRID
) {
isClustered = true;
}
@@ -220,8 +220,12 @@ export class BlendedVectorLayer extends VectorLayer implements IVectorLayer {
: displayName;
}
- isJoinable() {
- return false;
+ showJoinEditor() {
+ return true;
+ }
+
+ getJoinsDisabledReason() {
+ return this._documentSource.getJoinsDisabledReason();
}
getJoins() {
diff --git a/x-pack/plugins/maps/public/classes/layers/layer.tsx b/x-pack/plugins/maps/public/classes/layers/layer.tsx
index d6f6ee8fa609b..d8def155a9185 100644
--- a/x-pack/plugins/maps/public/classes/layers/layer.tsx
+++ b/x-pack/plugins/maps/public/classes/layers/layer.tsx
@@ -78,6 +78,8 @@ export interface ILayer {
isPreviewLayer: () => boolean;
areLabelsOnTop: () => boolean;
supportsLabelsOnTop: () => boolean;
+ showJoinEditor(): boolean;
+ getJoinsDisabledReason(): string | null;
}
export type Footnote = {
icon: ReactElement
;
@@ -141,13 +143,12 @@ export class AbstractLayer implements ILayer {
}
static getBoundDataForSource(mbMap: unknown, sourceId: string): FeatureCollection {
- // @ts-ignore
+ // @ts-expect-error
const mbStyle = mbMap.getStyle();
return mbStyle.sources[sourceId].data;
}
async cloneDescriptor(): Promise {
- // @ts-ignore
const clonedDescriptor = copyPersistentState(this._descriptor);
// layer id is uuid used to track styles/layers in mapbox
clonedDescriptor.id = uuid();
@@ -155,14 +156,10 @@ export class AbstractLayer implements ILayer {
clonedDescriptor.label = `Clone of ${displayName}`;
clonedDescriptor.sourceDescriptor = this.getSource().cloneDescriptor();
- // todo: remove this
- // This should not be in AbstractLayer. It relies on knowledge of VectorLayerDescriptor
- // @ts-ignore
if (clonedDescriptor.joins) {
- // @ts-ignore
+ // @ts-expect-error
clonedDescriptor.joins.forEach((joinDescriptor) => {
// right.id is uuid used to track requests in inspector
- // @ts-ignore
joinDescriptor.right.id = uuid();
});
}
@@ -173,8 +170,12 @@ export class AbstractLayer implements ILayer {
return `${this.getId()}${MB_SOURCE_ID_LAYER_ID_PREFIX_DELIMITER}${layerNameSuffix}`;
}
- isJoinable(): boolean {
- return this.getSource().isJoinable();
+ showJoinEditor(): boolean {
+ return this.getSource().showJoinEditor();
+ }
+
+ getJoinsDisabledReason() {
+ return this.getSource().getJoinsDisabledReason();
}
isPreviewLayer(): boolean {
@@ -394,7 +395,6 @@ export class AbstractLayer implements ILayer {
const requestTokens = this._dataRequests.map((dataRequest) => dataRequest.getRequestToken());
// Compact removes all the undefineds
- // @ts-ignore
return _.compact(requestTokens);
}
@@ -478,7 +478,7 @@ export class AbstractLayer implements ILayer {
}
syncVisibilityWithMb(mbMap: unknown, mbLayerId: string) {
- // @ts-ignore
+ // @ts-expect-error
mbMap.setLayoutProperty(mbLayerId, 'visibility', this.isVisible() ? 'visible' : 'none');
}
diff --git a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js
index 9431fb55dc88b..1be74140fe1bf 100644
--- a/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js
+++ b/x-pack/plugins/maps/public/classes/sources/es_geo_grid_source/es_geo_grid_source.js
@@ -63,6 +63,7 @@ export class ESGeoGridSource extends AbstractESAggSource {
getSyncMeta() {
return {
requestType: this._descriptor.requestType,
+ sourceType: SOURCE_TYPES.ES_GEO_GRID,
};
}
@@ -103,7 +104,7 @@ export class ESGeoGridSource extends AbstractESAggSource {
return true;
}
- isJoinable() {
+ showJoinEditor() {
return false;
}
@@ -307,7 +308,6 @@ export class ESGeoGridSource extends AbstractESAggSource {
},
meta: {
areResultsTrimmed: false,
- sourceType: SOURCE_TYPES.ES_GEO_GRID,
},
};
}
diff --git a/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js b/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js
index a4cff7c89a011..98db7bcdcc8a3 100644
--- a/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js
+++ b/x-pack/plugins/maps/public/classes/sources/es_pew_pew_source/es_pew_pew_source.js
@@ -51,7 +51,7 @@ export class ESPewPewSource extends AbstractESAggSource {
return true;
}
- isJoinable() {
+ showJoinEditor() {
return false;
}
diff --git a/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js b/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js
index c8f14f1dc6a4b..330fa6e8318ed 100644
--- a/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js
+++ b/x-pack/plugins/maps/public/classes/sources/es_search_source/es_search_source.js
@@ -385,7 +385,7 @@ export class ESSearchSource extends AbstractESSource {
return {
data: featureCollection,
- meta: { ...meta, sourceType: SOURCE_TYPES.ES_SEARCH },
+ meta,
};
}
@@ -540,6 +540,7 @@ export class ESSearchSource extends AbstractESSource {
scalingType: this._descriptor.scalingType,
topHitsSplitField: this._descriptor.topHitsSplitField,
topHitsSize: this._descriptor.topHitsSize,
+ sourceType: SOURCE_TYPES.ES_SEARCH,
};
}
@@ -551,6 +552,14 @@ export class ESSearchSource extends AbstractESSource {
path: geoField.name,
};
}
+
+ getJoinsDisabledReason() {
+ return this._descriptor.scalingType === SCALING_TYPES.CLUSTERS
+ ? i18n.translate('xpack.maps.source.esSearch.joinsDisabledReason', {
+ defaultMessage: 'Joins are not supported when scaling by clusters',
+ })
+ : null;
+ }
}
registerSource({
diff --git a/x-pack/plugins/maps/public/classes/sources/source.ts b/x-pack/plugins/maps/public/classes/sources/source.ts
index c68e22ada8b0c..696c07376575b 100644
--- a/x-pack/plugins/maps/public/classes/sources/source.ts
+++ b/x-pack/plugins/maps/public/classes/sources/source.ts
@@ -54,7 +54,8 @@ export interface ISource {
isESSource(): boolean;
renderSourceSettingsEditor({ onChange }: SourceEditorArgs): ReactElement | null;
supportsFitToBounds(): Promise;
- isJoinable(): boolean;
+ showJoinEditor(): boolean;
+ getJoinsDisabledReason(): string | null;
cloneDescriptor(): SourceDescriptor;
getFieldNames(): string[];
getApplyGlobalQuery(): boolean;
@@ -80,7 +81,6 @@ export class AbstractSource implements ISource {
destroy(): void {}
cloneDescriptor(): SourceDescriptor {
- // @ts-ignore
return copyPersistentState(this._descriptor);
}
@@ -148,10 +148,14 @@ export class AbstractSource implements ISource {
return 0;
}
- isJoinable(): boolean {
+ showJoinEditor(): boolean {
return false;
}
+ getJoinsDisabledReason() {
+ return null;
+ }
+
isESSource(): boolean {
return false;
}
diff --git a/x-pack/plugins/maps/public/classes/sources/vector_source/vector_source.js b/x-pack/plugins/maps/public/classes/sources/vector_source/vector_source.js
index ecb13bb875721..98ed89a6ff0ad 100644
--- a/x-pack/plugins/maps/public/classes/sources/vector_source/vector_source.js
+++ b/x-pack/plugins/maps/public/classes/sources/vector_source/vector_source.js
@@ -122,7 +122,7 @@ export class AbstractVectorSource extends AbstractSource {
return false;
}
- isJoinable() {
+ showJoinEditor() {
return true;
}
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap b/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap
index 1c48ed2290dce..2cf5287ae6594 100644
--- a/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/__snapshots__/view.test.js.snap
@@ -96,8 +96,8 @@ exports[`LayerPanel is rendered 1`] = `
"getId": [Function],
"getImmutableSourceProperties": [Function],
"getLayerTypeIconName": [Function],
- "isJoinable": [Function],
"renderSourceSettingsEditor": [Function],
+ "showJoinEditor": [Function],
"supportsElasticsearchFilters": [Function],
}
}
@@ -107,6 +107,17 @@ exports[`LayerPanel is rendered 1`] = `
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/index.js b/x-pack/plugins/maps/public/connected_components/layer_panel/index.js
index 1c8dcdb43d434..17fd41d120194 100644
--- a/x-pack/plugins/maps/public/connected_components/layer_panel/index.js
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/index.js
@@ -12,7 +12,7 @@ import { updateSourceProp } from '../../actions';
function mapStateToProps(state = {}) {
const selectedLayer = getSelectedLayer(state);
return {
- key: selectedLayer ? `${selectedLayer.getId()}${selectedLayer.isJoinable()}` : '',
+ key: selectedLayer ? `${selectedLayer.getId()}${selectedLayer.showJoinEditor()}` : '',
selectedLayer,
};
}
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap
new file mode 100644
index 0000000000000..00d7f44d6273f
--- /dev/null
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap
@@ -0,0 +1,100 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Should render callout when joins are disabled 1`] = `
+
+
+
+
+
+
+
+
+
+ Simulated disabled reason
+
+
+`;
+
+exports[`Should render join editor 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+`;
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/index.js b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/index.js
deleted file mode 100644
index cf55c16bbe0be..0000000000000
--- a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/index.js
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { connect } from 'react-redux';
-import { JoinEditor } from './view';
-import {
- getSelectedLayer,
- getSelectedLayerJoinDescriptors,
-} from '../../../selectors/map_selectors';
-import { setJoinsForLayer } from '../../../actions';
-
-function mapDispatchToProps(dispatch) {
- return {
- onChange: (layer, joins) => {
- dispatch(setJoinsForLayer(layer, joins));
- },
- };
-}
-
-function mapStateToProps(state = {}) {
- return {
- joins: getSelectedLayerJoinDescriptors(state),
- layer: getSelectedLayer(state),
- };
-}
-
-const connectedJoinEditor = connect(mapStateToProps, mapDispatchToProps)(JoinEditor);
-export { connectedJoinEditor as JoinEditor };
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/index.tsx b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/index.tsx
new file mode 100644
index 0000000000000..0348b38351971
--- /dev/null
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/index.tsx
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { AnyAction, Dispatch } from 'redux';
+import { connect } from 'react-redux';
+import { JoinEditor } from './join_editor';
+import { getSelectedLayerJoinDescriptors } from '../../../selectors/map_selectors';
+import { setJoinsForLayer } from '../../../actions';
+import { MapStoreState } from '../../../reducers/store';
+import { ILayer } from '../../../classes/layers/layer';
+import { JoinDescriptor } from '../../../../common/descriptor_types';
+
+function mapStateToProps(state: MapStoreState) {
+ return {
+ joins: getSelectedLayerJoinDescriptors(state),
+ };
+}
+
+function mapDispatchToProps(dispatch: Dispatch) {
+ return {
+ onChange: (layer: ILayer, joins: JoinDescriptor[]) => {
+ dispatch(setJoinsForLayer(layer, joins));
+ },
+ };
+}
+
+const connectedJoinEditor = connect(mapStateToProps, mapDispatchToProps)(JoinEditor);
+export { connectedJoinEditor as JoinEditor };
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.test.tsx b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.test.tsx
new file mode 100644
index 0000000000000..12da1c4bb9388
--- /dev/null
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.test.tsx
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React from 'react';
+import { ILayer } from '../../../classes/layers/layer';
+import { JoinEditor } from './join_editor';
+import { shallow } from 'enzyme';
+import { JoinDescriptor } from '../../../../common/descriptor_types';
+
+class MockLayer {
+ private readonly _disableReason: string | null;
+
+ constructor(disableReason: string | null) {
+ this._disableReason = disableReason;
+ }
+
+ getJoinsDisabledReason() {
+ return this._disableReason;
+ }
+}
+
+const defaultProps = {
+ joins: [
+ {
+ leftField: 'iso2',
+ right: {
+ id: '673ff994-fc75-4c67-909b-69fcb0e1060e',
+ indexPatternTitle: 'kibana_sample_data_logs',
+ term: 'geo.src',
+ indexPatternId: 'abcde',
+ metrics: [
+ {
+ type: 'count',
+ label: 'web logs count',
+ },
+ ],
+ },
+ } as JoinDescriptor,
+ ],
+ layerDisplayName: 'myLeftJoinField',
+ leftJoinFields: [],
+ onChange: () => {},
+};
+
+test('Should render join editor', () => {
+ const component = shallow(
+
+ );
+ expect(component).toMatchSnapshot();
+});
+
+test('Should render callout when joins are disabled', () => {
+ const component = shallow(
+
+ );
+ expect(component).toMatchSnapshot();
+});
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx
new file mode 100644
index 0000000000000..c589604e85112
--- /dev/null
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx
@@ -0,0 +1,124 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React, { Fragment } from 'react';
+import uuid from 'uuid/v4';
+
+import {
+ EuiButtonEmpty,
+ EuiTitle,
+ EuiSpacer,
+ EuiToolTip,
+ EuiTextAlign,
+ EuiCallOut,
+} from '@elastic/eui';
+
+import { FormattedMessage } from '@kbn/i18n/react';
+import { i18n } from '@kbn/i18n';
+// @ts-expect-error
+import { Join } from './resources/join';
+import { ILayer } from '../../../classes/layers/layer';
+import { JoinDescriptor } from '../../../../common/descriptor_types';
+import { IField } from '../../../classes/fields/field';
+
+interface Props {
+ joins: JoinDescriptor[];
+ layer: ILayer;
+ layerDisplayName: string;
+ leftJoinFields: IField[];
+ onChange: (layer: ILayer, joins: JoinDescriptor[]) => void;
+}
+
+export function JoinEditor({ joins, layer, onChange, leftJoinFields, layerDisplayName }: Props) {
+ const renderJoins = () => {
+ return joins.map((joinDescriptor: JoinDescriptor, index: number) => {
+ const handleOnChange = (updatedDescriptor: JoinDescriptor) => {
+ onChange(layer, [...joins.slice(0, index), updatedDescriptor, ...joins.slice(index + 1)]);
+ };
+
+ const handleOnRemove = () => {
+ onChange(layer, [...joins.slice(0, index), ...joins.slice(index + 1)]);
+ };
+
+ return (
+
+
+
+
+ );
+ });
+ };
+
+ const addJoin = () => {
+ onChange(layer, [
+ ...joins,
+ {
+ right: {
+ id: uuid(),
+ applyGlobalQuery: true,
+ },
+ } as JoinDescriptor,
+ ]);
+ };
+
+ const renderContent = () => {
+ const disabledReason = layer.getJoinsDisabledReason();
+ return disabledReason ? (
+ {disabledReason}
+ ) : (
+
+ {renderJoins()}
+
+
+
+
+
+
+
+
+
+ );
+ };
+
+ return (
+
+
+
+
+
+
+
+
+
+ {renderContent()}
+
+ );
+}
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/view.js b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/view.js
deleted file mode 100644
index 900f5c9ff53ea..0000000000000
--- a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/view.js
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import React, { Fragment } from 'react';
-import uuid from 'uuid/v4';
-
-import {
- EuiFlexGroup,
- EuiFlexItem,
- EuiButtonIcon,
- EuiTitle,
- EuiSpacer,
- EuiToolTip,
-} from '@elastic/eui';
-
-import { Join } from './resources/join';
-import { FormattedMessage } from '@kbn/i18n/react';
-import { i18n } from '@kbn/i18n';
-
-export function JoinEditor({ joins, layer, onChange, leftJoinFields, layerDisplayName }) {
- const renderJoins = () => {
- return joins.map((joinDescriptor, index) => {
- const handleOnChange = (updatedDescriptor) => {
- onChange(layer, [...joins.slice(0, index), updatedDescriptor, ...joins.slice(index + 1)]);
- };
-
- const handleOnRemove = () => {
- onChange(layer, [...joins.slice(0, index), ...joins.slice(index + 1)]);
- };
-
- return (
-
-
-
-
- );
- });
- };
-
- const addJoin = () => {
- onChange(layer, [
- ...joins,
- {
- right: {
- id: uuid(),
- applyGlobalQuery: true,
- },
- },
- ]);
- };
-
- if (!layer.isJoinable()) {
- return null;
- }
-
- return (
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {renderJoins()}
-
- );
-}
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/view.js b/x-pack/plugins/maps/public/connected_components/layer_panel/view.js
index 71d76ff53d8a9..2e20a4492f08b 100644
--- a/x-pack/plugins/maps/public/connected_components/layer_panel/view.js
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/view.js
@@ -75,7 +75,7 @@ export class LayerPanel extends React.Component {
};
async _loadLeftJoinFields() {
- if (!this.props.selectedLayer || !this.props.selectedLayer.isJoinable()) {
+ if (!this.props.selectedLayer || !this.props.selectedLayer.showJoinEditor()) {
return;
}
@@ -120,7 +120,7 @@ export class LayerPanel extends React.Component {
}
_renderJoinSection() {
- if (!this.props.selectedLayer.isJoinable()) {
+ if (!this.props.selectedLayer.showJoinEditor()) {
return null;
}
@@ -128,6 +128,7 @@ export class LayerPanel extends React.Component {
diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/view.test.js b/x-pack/plugins/maps/public/connected_components/layer_panel/view.test.js
index 99893c1bc5bee..33ca80b00c451 100644
--- a/x-pack/plugins/maps/public/connected_components/layer_panel/view.test.js
+++ b/x-pack/plugins/maps/public/connected_components/layer_panel/view.test.js
@@ -55,7 +55,7 @@ const mockLayer = {
getImmutableSourceProperties: () => {
return [{ label: 'source prop1', value: 'you get one chance to set me' }];
},
- isJoinable: () => {
+ showJoinEditor: () => {
return true;
},
supportsElasticsearchFilters: () => {
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/logo.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/logo.json
new file mode 100644
index 0000000000000..ca61db7992083
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/logo.json
@@ -0,0 +1,3 @@
+{
+ "icon": "logoSecurity"
+}
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/manifest.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/manifest.json
new file mode 100644
index 0000000000000..b7afe8d2b158a
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/manifest.json
@@ -0,0 +1,64 @@
+{
+ "id": "siem_cloudtrail",
+ "title": "SIEM Cloudtrail",
+ "description": "Detect suspicious activity recorded in your CloudTrail logs.",
+ "type": "Filebeat data",
+ "logoFile": "logo.json",
+ "defaultIndexPattern": "filebeat-*",
+ "query": {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "aws.cloudtrail"}}
+ ]
+ }
+ },
+ "jobs": [
+ {
+ "id": "rare_method_for_a_city",
+ "file": "rare_method_for_a_city.json"
+ },
+ {
+ "id": "rare_method_for_a_country",
+ "file": "rare_method_for_a_country.json"
+ },
+ {
+ "id": "rare_method_for_a_username",
+ "file": "rare_method_for_a_username.json"
+ },
+ {
+ "id": "high_distinct_count_error_message",
+ "file": "high_distinct_count_error_message.json"
+ },
+ {
+ "id": "rare_error_code",
+ "file": "rare_error_code.json"
+ }
+ ],
+ "datafeeds": [
+ {
+ "id": "datafeed-rare_method_for_a_city",
+ "file": "datafeed_rare_method_for_a_city.json",
+ "job_id": "rare_method_for_a_city"
+ },
+ {
+ "id": "datafeed-rare_method_for_a_country",
+ "file": "datafeed_rare_method_for_a_country.json",
+ "job_id": "rare_method_for_a_country"
+ },
+ {
+ "id": "datafeed-rare_method_for_a_username",
+ "file": "datafeed_rare_method_for_a_username.json",
+ "job_id": "rare_method_for_a_username"
+ },
+ {
+ "id": "datafeed-high_distinct_count_error_message",
+ "file": "datafeed_high_distinct_count_error_message.json",
+ "job_id": "high_distinct_count_error_message"
+ },
+ {
+ "id": "datafeed-rare_error_code",
+ "file": "datafeed_rare_error_code.json",
+ "job_id": "rare_error_code"
+ }
+ ]
+ }
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_high_distinct_count_error_message.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_high_distinct_count_error_message.json
new file mode 100644
index 0000000000000..269aac2ea72a1
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_high_distinct_count_error_message.json
@@ -0,0 +1,16 @@
+{
+ "job_id": "JOB_ID",
+ "indices": [
+ "INDEX_PATTERN_NAME"
+ ],
+ "max_empty_searches": 10,
+ "query": {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "aws.cloudtrail"}},
+ {"term": {"event.module": "aws"}},
+ {"exists": {"field": "aws.cloudtrail.error_message"}}
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_error_code.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_error_code.json
new file mode 100644
index 0000000000000..4b463a4d10991
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_error_code.json
@@ -0,0 +1,16 @@
+{
+ "job_id": "JOB_ID",
+ "indices": [
+ "INDEX_PATTERN_NAME"
+ ],
+ "max_empty_searches": 10,
+ "query": {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "aws.cloudtrail"}},
+ {"term": {"event.module": "aws"}},
+ {"exists": {"field": "aws.cloudtrail.error_code"}}
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_city.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_city.json
new file mode 100644
index 0000000000000..e436273a848e7
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_city.json
@@ -0,0 +1,16 @@
+{
+ "job_id": "JOB_ID",
+ "indices": [
+ "INDEX_PATTERN_NAME"
+ ],
+ "max_empty_searches": 10,
+ "query": {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "aws.cloudtrail"}},
+ {"term": {"event.module": "aws"}},
+ {"exists": {"field": "source.geo.city_name"}}
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_country.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_country.json
new file mode 100644
index 0000000000000..f0e80174b8791
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_country.json
@@ -0,0 +1,16 @@
+{
+ "job_id": "JOB_ID",
+ "indices": [
+ "INDEX_PATTERN_NAME"
+ ],
+ "max_empty_searches": 10,
+ "query": {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "aws.cloudtrail"}},
+ {"term": {"event.module": "aws"}},
+ {"exists": {"field": "source.geo.country_iso_code"}}
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_username.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_username.json
new file mode 100644
index 0000000000000..2fd3622ff81ce
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/datafeed_rare_method_for_a_username.json
@@ -0,0 +1,16 @@
+{
+ "job_id": "JOB_ID",
+ "indices": [
+ "INDEX_PATTERN_NAME"
+ ],
+ "max_empty_searches": 10,
+ "query": {
+ "bool": {
+ "filter": [
+ {"term": {"event.dataset": "aws.cloudtrail"}},
+ {"term": {"event.module": "aws"}},
+ {"exists": {"field": "user.name"}}
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/high_distinct_count_error_message.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/high_distinct_count_error_message.json
new file mode 100644
index 0000000000000..fdabf66ac91b3
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/high_distinct_count_error_message.json
@@ -0,0 +1,33 @@
+{
+ "job_type": "anomaly_detector",
+ "description": "Looks for a spike in the rate of an error message, which may simply indicate an impending service failure, but it can also be a byproduct of attempted or successful persistence, privilege escalation, defense evasion, discovery, lateral movement, or collection activity by a threat actor.",
+ "groups": [
+ "siem",
+ "cloudtrail"
+ ],
+ "analysis_config": {
+ "bucket_span": "15m",
+ "detectors": [
+ {
+ "detector_description": "high_distinct_count(\"aws.cloudtrail.error_message\")",
+ "function": "high_distinct_count",
+ "field_name": "aws.cloudtrail.error_message"
+ }
+ ],
+ "influencers": [
+ "aws.cloudtrail.user_identity.arn",
+ "source.ip",
+ "source.geo.city_name"
+ ]
+ },
+ "allow_lazy_open": true,
+ "analysis_limits": {
+ "model_memory_limit": "16mb"
+ },
+ "data_description": {
+ "time_field": "@timestamp"
+ },
+ "custom_settings": {
+ "created_by": "ml-module-siem-cloudtrail"
+ }
+ }
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json
new file mode 100644
index 0000000000000..0f8fa814ac60a
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json
@@ -0,0 +1,33 @@
+{
+ "job_type": "anomaly_detector",
+ "description": "Looks for unusual errors. Rare and unusual errors may simply indicate an impending service failure, but they can also be byproducts of attempted or successful persistence, privilege escalation, defense evasion, discovery, lateral movement, or collection activity by a threat actor.",
+ "groups": [
+ "siem",
+ "cloudtrail"
+ ],
+ "analysis_config": {
+ "bucket_span": "60m",
+ "detectors": [
+ {
+ "detector_description": "rare by \"aws.cloudtrail.error_code\"",
+ "function": "rare",
+ "by_field_name": "aws.cloudtrail.error_code"
+ }
+ ],
+ "influencers": [
+ "aws.cloudtrail.user_identity.arn",
+ "source.ip",
+ "source.geo.city_name"
+ ]
+ },
+ "allow_lazy_open": true,
+ "analysis_limits": {
+ "model_memory_limit": "16mb"
+ },
+ "data_description": {
+ "time_field": "@timestamp"
+ },
+ "custom_settings": {
+ "created_by": "ml-module-siem-cloudtrail"
+ }
+ }
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_city.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_city.json
new file mode 100644
index 0000000000000..eff4d4cdbb889
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_city.json
@@ -0,0 +1,34 @@
+{
+ "job_type": "anomaly_detector",
+ "description": "Looks for AWS API calls that, while not inherently suspicious or abnormal, are sourcing from a geolocation (city) that is unusual. This can be the result of compromised credentials or keys.",
+ "groups": [
+ "siem",
+ "cloudtrail"
+ ],
+ "analysis_config": {
+ "bucket_span": "60m",
+ "detectors": [
+ {
+ "detector_description": "rare by \"event.action\" partition by \"source.geo.city_name\"",
+ "function": "rare",
+ "by_field_name": "event.action",
+ "partition_field_name": "source.geo.city_name"
+ }
+ ],
+ "influencers": [
+ "aws.cloudtrail.user_identity.arn",
+ "source.ip",
+ "source.geo.city_name"
+ ]
+ },
+ "allow_lazy_open": true,
+ "analysis_limits": {
+ "model_memory_limit": "64mb"
+ },
+ "data_description": {
+ "time_field": "@timestamp"
+ },
+ "custom_settings": {
+ "created_by": "ml-module-siem-cloudtrail"
+ }
+ }
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_country.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_country.json
new file mode 100644
index 0000000000000..810822c30a5dd
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_country.json
@@ -0,0 +1,34 @@
+{
+ "job_type": "anomaly_detector",
+ "description": "Looks for AWS API calls that, while not inherently suspicious or abnormal, are sourcing from a geolocation (country) that is unusual. This can be the result of compromised credentials or keys.",
+ "groups": [
+ "siem",
+ "cloudtrail"
+ ],
+ "analysis_config": {
+ "bucket_span": "60m",
+ "detectors": [
+ {
+ "detector_description": "rare by \"event.action\" partition by \"source.geo.country_iso_code\"",
+ "function": "rare",
+ "by_field_name": "event.action",
+ "partition_field_name": "source.geo.country_iso_code"
+ }
+ ],
+ "influencers": [
+ "aws.cloudtrail.user_identity.arn",
+ "source.ip",
+ "source.geo.country_iso_code"
+ ]
+ },
+ "allow_lazy_open": true,
+ "analysis_limits": {
+ "model_memory_limit": "64mb"
+ },
+ "data_description": {
+ "time_field": "@timestamp"
+ },
+ "custom_settings": {
+ "created_by": "ml-module-siem-cloudtrail"
+ }
+ }
\ No newline at end of file
diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_username.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_username.json
new file mode 100644
index 0000000000000..2edf52e8351ed
--- /dev/null
+++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_method_for_a_username.json
@@ -0,0 +1,34 @@
+{
+ "job_type": "anomaly_detector",
+ "description": "Looks for AWS API calls that, while not inherently suspicious or abnormal, are sourcing from a user context that does not normally call the method. This can be the result of compromised credentials or keys as someone uses a valid account to persist, move laterally, or exfiltrate data.",
+ "groups": [
+ "siem",
+ "cloudtrail"
+ ],
+ "analysis_config": {
+ "bucket_span": "60m",
+ "detectors": [
+ {
+ "detector_description": "rare by \"event.action\" partition by \"user.name\"",
+ "function": "rare",
+ "by_field_name": "event.action",
+ "partition_field_name": "user.name"
+ }
+ ],
+ "influencers": [
+ "user.name",
+ "source.ip",
+ "source.geo.city_name"
+ ]
+ },
+ "allow_lazy_open": true,
+ "analysis_limits": {
+ "model_memory_limit": "128mb"
+ },
+ "data_description": {
+ "time_field": "@timestamp"
+ },
+ "custom_settings": {
+ "created_by": "ml-module-siem-cloudtrail"
+ }
+ }
\ No newline at end of file
diff --git a/x-pack/plugins/security_solution/common/constants.ts b/x-pack/plugins/security_solution/common/constants.ts
index 7cd5692176ee3..4e9514feec74f 100644
--- a/x-pack/plugins/security_solution/common/constants.ts
+++ b/x-pack/plugins/security_solution/common/constants.ts
@@ -59,9 +59,9 @@ export const DEFAULT_INDEX_PATTERN = [
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
- 'logs-*',
];
/** This Kibana Advanced Setting enables the `Security news` feed widget */
diff --git a/x-pack/plugins/security_solution/common/endpoint/models/event.ts b/x-pack/plugins/security_solution/common/endpoint/models/event.ts
index 86cccff957211..9b4550f52ff22 100644
--- a/x-pack/plugins/security_solution/common/endpoint/models/event.ts
+++ b/x-pack/plugins/security_solution/common/endpoint/models/event.ts
@@ -82,7 +82,6 @@ export function getAncestryAsArray(event: ResolverEvent | undefined): string[] {
* @param event The event to get the category for
*/
export function primaryEventCategory(event: ResolverEvent): string | undefined {
- // Returning "Process" as a catch-all here because it seems pretty general
if (isLegacyEvent(event)) {
const legacyFullType = event.endgame.event_type_full;
if (legacyFullType) {
@@ -96,6 +95,20 @@ export function primaryEventCategory(event: ResolverEvent): string | undefined {
}
}
+/**
+ * @param event The event to get the full ECS category for
+ */
+export function allEventCategories(event: ResolverEvent): string | string[] | undefined {
+ if (isLegacyEvent(event)) {
+ const legacyFullType = event.endgame.event_type_full;
+ if (legacyFullType) {
+ return legacyFullType;
+ }
+ } else {
+ return event.event.category;
+ }
+}
+
/**
* ECS event type will be things like 'creation', 'deletion', 'access', etc.
* see: https://www.elastic.co/guide/en/ecs/current/ecs-event.html
diff --git a/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts b/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts
index 81832b3d9edea..a51ad4388c428 100644
--- a/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts
+++ b/x-pack/plugins/security_solution/cypress/integration/alerts_detection_rules_custom.spec.ts
@@ -131,6 +131,7 @@ describe.skip('Detection rules, custom', () => {
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
];
diff --git a/x-pack/plugins/security_solution/cypress/integration/timeline_toggle_column.spec.ts b/x-pack/plugins/security_solution/cypress/integration/timeline_toggle_column.spec.ts
index 12e6f3db9b61e..759eec69bc022 100644
--- a/x-pack/plugins/security_solution/cypress/integration/timeline_toggle_column.spec.ts
+++ b/x-pack/plugins/security_solution/cypress/integration/timeline_toggle_column.spec.ts
@@ -24,7 +24,8 @@ import {
import { HOSTS_URL } from '../urls/navigation';
-describe('toggle column in timeline', () => {
+// Flaky: https://github.com/elastic/kibana/issues/71361
+describe.skip('toggle column in timeline', () => {
before(() => {
loginAndWaitForPage(HOSTS_URL);
});
diff --git a/x-pack/plugins/security_solution/public/common/components/drag_and_drop/__snapshots__/drag_drop_context_wrapper.test.tsx.snap b/x-pack/plugins/security_solution/public/common/components/drag_and_drop/__snapshots__/drag_drop_context_wrapper.test.tsx.snap
index 0c96d0320d198..16f095e5effbb 100644
--- a/x-pack/plugins/security_solution/public/common/components/drag_and_drop/__snapshots__/drag_drop_context_wrapper.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/common/components/drag_and_drop/__snapshots__/drag_drop_context_wrapper.test.tsx.snap
@@ -369,9 +369,9 @@ exports[`DragDropContextWrapper rendering it renders against the snapshot 1`] =
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/public/common/components/event_details/__snapshots__/event_details.test.tsx.snap b/x-pack/plugins/security_solution/public/common/components/event_details/__snapshots__/event_details.test.tsx.snap
index 408a4c74e930f..9ca9cd6cce389 100644
--- a/x-pack/plugins/security_solution/public/common/components/event_details/__snapshots__/event_details.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/common/components/event_details/__snapshots__/event_details.test.tsx.snap
@@ -377,9 +377,9 @@ exports[`EventDetails rendering should match snapshot 1`] = `
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
@@ -1070,9 +1070,9 @@ In other use cases the message field can be used to concatenate different values
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/public/common/containers/source/index.test.tsx b/x-pack/plugins/security_solution/public/common/containers/source/index.test.tsx
index b9daba9a40941..bfde17723aef4 100644
--- a/x-pack/plugins/security_solution/public/common/containers/source/index.test.tsx
+++ b/x-pack/plugins/security_solution/public/common/containers/source/index.test.tsx
@@ -29,7 +29,7 @@ describe('Index Fields & Browser Fields', () => {
indexPattern: {
fields: [],
title:
- 'apm-*-transaction*,auditbeat-*,endgame-*,filebeat-*,packetbeat-*,winlogbeat-*,logs-*',
+ 'apm-*-transaction*,auditbeat-*,endgame-*,filebeat-*,logs-*,packetbeat-*,winlogbeat-*',
},
indicesExist: true,
loading: true,
@@ -59,7 +59,7 @@ describe('Index Fields & Browser Fields', () => {
indexPattern: {
fields: mockIndexFields,
title:
- 'apm-*-transaction*,auditbeat-*,endgame-*,filebeat-*,packetbeat-*,winlogbeat-*,logs-*',
+ 'apm-*-transaction*,auditbeat-*,endgame-*,filebeat-*,logs-*,packetbeat-*,winlogbeat-*',
},
loading: false,
errorMessage: null,
diff --git a/x-pack/plugins/security_solution/public/detections/components/rules/step_schedule_rule/index.tsx b/x-pack/plugins/security_solution/public/detections/components/rules/step_schedule_rule/index.tsx
index 60855bc5fa25f..fa0f4dbd3668c 100644
--- a/x-pack/plugins/security_solution/public/detections/components/rules/step_schedule_rule/index.tsx
+++ b/x-pack/plugins/security_solution/public/detections/components/rules/step_schedule_rule/index.tsx
@@ -6,7 +6,6 @@
import React, { FC, memo, useCallback, useEffect, useState } from 'react';
import deepEqual from 'fast-deep-equal';
-import styled from 'styled-components';
import { setFieldValue } from '../../../pages/detection_engine/rules/helpers';
import {
@@ -25,10 +24,6 @@ interface StepScheduleRuleProps extends RuleStepProps {
defaultValues?: ScheduleStepRule | null;
}
-const RestrictedWidthContainer = styled.div`
- max-width: 300px;
-`;
-
const stepScheduleDefaultValue = {
interval: '5m',
isNew: true,
@@ -93,29 +88,25 @@ const StepScheduleRuleComponent: FC = ({
<>
diff --git a/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/fetch_index_patterns.test.tsx b/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/fetch_index_patterns.test.tsx
index c282a204f19a5..0204a2980b9fc 100644
--- a/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/fetch_index_patterns.test.tsx
+++ b/x-pack/plugins/security_solution/public/detections/containers/detection_engine/rules/fetch_index_patterns.test.tsx
@@ -352,9 +352,9 @@ describe('useFetchIndexPatterns', () => {
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
- 'logs-*',
],
name: 'event.end',
searchable: true,
@@ -369,9 +369,9 @@ describe('useFetchIndexPatterns', () => {
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
- 'logs-*',
],
indicesExists: true,
indexPatterns: {
@@ -418,7 +418,7 @@ describe('useFetchIndexPatterns', () => {
{ name: 'event.end', searchable: true, type: 'date', aggregatable: true },
],
title:
- 'apm-*-transaction*,auditbeat-*,endgame-*,filebeat-*,packetbeat-*,winlogbeat-*,logs-*',
+ 'apm-*-transaction*,auditbeat-*,endgame-*,filebeat-*,logs-*,packetbeat-*,winlogbeat-*',
},
},
result.current[1],
@@ -450,9 +450,9 @@ describe('useFetchIndexPatterns', () => {
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
- 'logs-*',
],
indicesExists: false,
isLoading: false,
diff --git a/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/details/policy_response_friendly_names.ts b/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/details/policy_response_friendly_names.ts
index 28e91331b428d..020e8c9e38ad5 100644
--- a/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/details/policy_response_friendly_names.ts
+++ b/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/details/policy_response_friendly_names.ts
@@ -6,7 +6,209 @@
import { i18n } from '@kbn/i18n';
-const responseMap = new Map();
+const policyResponses: Array<[string, string]> = [
+ [
+ 'configure_dns_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_dns_events',
+ { defaultMessage: 'Configure DNS Events' }
+ ),
+ ],
+ [
+ 'configure_elasticsearch_connection',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_elasticsearch_connection',
+ { defaultMessage: 'Configure Elastic Search Connection' }
+ ),
+ ],
+ [
+ 'configure_file_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_file_events',
+ { defaultMessage: 'Configure File Events' }
+ ),
+ ],
+ [
+ 'configure_imageload_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_imageload_events',
+ { defaultMessage: 'Configure Image Load Events' }
+ ),
+ ],
+ [
+ 'configure_kernel',
+ i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_kernel', {
+ defaultMessage: 'Configure Kernel',
+ }),
+ ],
+ [
+ 'configure_logging',
+ i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_logging', {
+ defaultMessage: 'Configure Logging',
+ }),
+ ],
+ [
+ 'configure_malware',
+ i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_malware', {
+ defaultMessage: 'Configure Malware',
+ }),
+ ],
+ [
+ 'configure_network_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_network_events',
+ { defaultMessage: 'Configure Network Events' }
+ ),
+ ],
+ [
+ 'configure_process_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_process_events',
+ { defaultMessage: 'Configure Process Events' }
+ ),
+ ],
+ [
+ 'configure_registry_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_registry_events',
+ { defaultMessage: 'Configure Registry Events' }
+ ),
+ ],
+ [
+ 'configure_security_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configure_security_events',
+ { defaultMessage: 'Configure Security Events' }
+ ),
+ ],
+ [
+ 'connect_kernel',
+ i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.connect_kernel', {
+ defaultMessage: 'Connect Kernel',
+ }),
+ ],
+ [
+ 'detect_async_image_load_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_async_image_load_events',
+ { defaultMessage: 'Detect Async Image Load Events' }
+ ),
+ ],
+ [
+ 'detect_file_open_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_file_open_events',
+ { defaultMessage: 'Detect File Open Events' }
+ ),
+ ],
+ [
+ 'detect_file_write_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_file_write_events',
+ { defaultMessage: 'Detect File Write Events' }
+ ),
+ ],
+ [
+ 'detect_network_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_network_events',
+ { defaultMessage: 'Detect Network Events' }
+ ),
+ ],
+ [
+ 'detect_process_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_process_events',
+ { defaultMessage: 'Detect Process Events' }
+ ),
+ ],
+ [
+ 'detect_registry_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_registry_events',
+ { defaultMessage: 'Detect Registry Events' }
+ ),
+ ],
+ [
+ 'detect_sync_image_load_events',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detect_sync_image_load_events',
+ { defaultMessage: 'Detect Sync Image Load Events' }
+ ),
+ ],
+ [
+ 'download_global_artifacts',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.download_global_artifacts',
+ { defaultMessage: 'Download Global Artifacts' }
+ ),
+ ],
+ [
+ 'download_user_artifacts',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.download_user_artifacts',
+ { defaultMessage: 'Download User Artifacts' }
+ ),
+ ],
+ [
+ 'load_config',
+ i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.load_config', {
+ defaultMessage: 'Load Config',
+ }),
+ ],
+ [
+ 'load_malware_model',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.load_malware_model',
+ { defaultMessage: 'Load Malware Model' }
+ ),
+ ],
+ [
+ 'read_elasticsearch_config',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.read_elasticsearch_config',
+ { defaultMessage: 'Read ElasticSearch Config' }
+ ),
+ ],
+ [
+ 'read_events_config',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.read_events_config',
+ { defaultMessage: 'Read Events Config' }
+ ),
+ ],
+ [
+ 'read_kernel_config',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.read_kernel_config',
+ { defaultMessage: 'Read Kernel Config' }
+ ),
+ ],
+ [
+ 'read_logging_config',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.read_logging_config',
+ { defaultMessage: 'Read Logging Config' }
+ ),
+ ],
+ [
+ 'read_malware_config',
+ i18n.translate(
+ 'xpack.securitySolution.endpoint.hostDetails.policyResponse.read_malware_config',
+ { defaultMessage: 'Read Malware Config' }
+ ),
+ ],
+ [
+ 'workflow',
+ i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.workflow', {
+ defaultMessage: 'Workflow',
+ }),
+ ],
+];
+
+const responseMap = new Map(policyResponses);
+
+// Additional values used in the Policy Response UI
responseMap.set(
'success',
i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.success', {
@@ -49,144 +251,6 @@ responseMap.set(
defaultMessage: 'Events',
})
);
-responseMap.set(
- 'configure_elasticsearch_connection',
- i18n.translate(
- 'xpack.securitySolution.endpoint.hostDetails.policyResponse.configureElasticSearchConnection',
- {
- defaultMessage: 'Configure Elastic Search Connection',
- }
- )
-);
-responseMap.set(
- 'configure_logging',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.configureLogging', {
- defaultMessage: 'Configure Logging',
- })
-);
-responseMap.set(
- 'configure_kernel',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.configureKernel', {
- defaultMessage: 'Configure Kernel',
- })
-);
-responseMap.set(
- 'configure_malware',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.configureMalware', {
- defaultMessage: 'Configure Malware',
- })
-);
-responseMap.set(
- 'connect_kernel',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.connectKernel', {
- defaultMessage: 'Connect Kernel',
- })
-);
-responseMap.set(
- 'detect_file_open_events',
- i18n.translate(
- 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detectFileOpenEvents',
- {
- defaultMessage: 'Detect File Open Events',
- }
- )
-);
-responseMap.set(
- 'detect_file_write_events',
- i18n.translate(
- 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detectFileWriteEvents',
- {
- defaultMessage: 'Detect File Write Events',
- }
- )
-);
-responseMap.set(
- 'detect_image_load_events',
- i18n.translate(
- 'xpack.securitySolution.endpoint.hostDetails.policyResponse.detectImageLoadEvents',
- {
- defaultMessage: 'Detect Image Load Events',
- }
- )
-);
-responseMap.set(
- 'detect_process_events',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.detectProcessEvents', {
- defaultMessage: 'Detect Process Events',
- })
-);
-responseMap.set(
- 'download_global_artifacts',
- i18n.translate(
- 'xpack.securitySolution.endpoint.hostDetails.policyResponse.downloadGlobalArtifacts',
- {
- defaultMessage: 'Download Global Artifacts',
- }
- )
-);
-responseMap.set(
- 'load_config',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.loadConfig', {
- defaultMessage: 'Load Config',
- })
-);
-responseMap.set(
- 'load_malware_model',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.loadMalwareModel', {
- defaultMessage: 'Load Malware Model',
- })
-);
-responseMap.set(
- 'read_elasticsearch_config',
- i18n.translate(
- 'xpack.securitySolution.endpoint.hostDetails.policyResponse.readElasticSearchConfig',
- {
- defaultMessage: 'Read ElasticSearch Config',
- }
- )
-);
-responseMap.set(
- 'read_events_config',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.readEventsConfig', {
- defaultMessage: 'Read Events Config',
- })
-);
-responseMap.set(
- 'read_kernel_config',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.readKernelConfig', {
- defaultMessage: 'Read Kernel Config',
- })
-);
-responseMap.set(
- 'read_logging_config',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.readLoggingConfig', {
- defaultMessage: 'Read Logging Config',
- })
-);
-responseMap.set(
- 'read_malware_config',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.readMalwareConfig', {
- defaultMessage: 'Read Malware Config',
- })
-);
-responseMap.set(
- 'workflow',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.workflow', {
- defaultMessage: 'Workflow',
- })
-);
-responseMap.set(
- 'download_model',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.downloadModel', {
- defaultMessage: 'Download Model',
- })
-);
-responseMap.set(
- 'ingest_events_config',
- i18n.translate('xpack.securitySolution.endpoint.hostDetails.policyResponse.injestEventsConfig', {
- defaultMessage: 'Injest Events Config',
- })
-);
/**
* Maps a server provided value to corresponding i18n'd string.
@@ -195,5 +259,13 @@ export function formatResponse(responseString: string) {
if (responseMap.has(responseString)) {
return responseMap.get(responseString);
}
- return responseString;
+
+  // It's possible for the UI to receive an Action name for which it does not yet have a translation,
+  // thus we generate a label for it here by making it more user friendly
+ responseMap.set(
+ responseString,
+ responseString.replace(/_/g, ' ').replace(/\b(\w)/g, (m) => m.toUpperCase())
+ );
+
+ return responseMap.get(responseString);
}
diff --git a/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/index.test.tsx b/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/index.test.tsx
index 996b987ea2be3..a61088e2edd29 100644
--- a/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/index.test.tsx
+++ b/x-pack/plugins/security_solution/public/management/pages/endpoint_hosts/view/index.test.tsx
@@ -13,8 +13,9 @@ import { mockPolicyResultList } from '../../policy/store/policy_list/mock_policy
import { AppContextTestRender, createAppRootMockRenderer } from '../../../../common/mock/endpoint';
import {
HostInfo,
- HostStatus,
HostPolicyResponseActionStatus,
+ HostPolicyResponseAppliedAction,
+ HostStatus,
} from '../../../../../common/endpoint/types';
import { EndpointDocGenerator } from '../../../../../common/endpoint/generate_data';
import { AppAction } from '../../../../common/store/actions';
@@ -251,6 +252,16 @@ describe('when on the hosts page', () => {
) {
malwareResponseConfigurations.concerned_actions.push(downloadModelAction.name);
}
+
+ // Add an unknown Action Name - to ensure we handle the format of it on the UI
+ const unknownAction: HostPolicyResponseAppliedAction = {
+ status: HostPolicyResponseActionStatus.success,
+ message: 'test message',
+ name: 'a_new_unknown_action',
+ };
+ policyResponse.Endpoint.policy.applied.actions.push(unknownAction);
+ malwareResponseConfigurations.concerned_actions.push(unknownAction.name);
+
reactTestingLibrary.act(() => {
store.dispatch({
type: 'serverReturnedHostPolicyResponse',
@@ -564,6 +575,10 @@ describe('when on the hosts page', () => {
'?page_index=0&page_size=10&selected_host=1'
);
});
+
+ it('should format unknown policy action names', async () => {
+ expect(renderResult.getByText('A New Unknown Action')).not.toBeNull();
+ });
});
});
});
diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/index.test.ts b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/index.test.ts
index 102fd40c97672..d3ec0670d29c5 100644
--- a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/index.test.ts
+++ b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/index.test.ts
@@ -5,130 +5,270 @@
*/
import { PolicyDetailsState } from '../../types';
-import { createStore, Dispatch, Store } from 'redux';
-import { policyDetailsReducer, PolicyDetailsAction } from './index';
+import { applyMiddleware, createStore, Dispatch, Store } from 'redux';
+import { policyDetailsReducer, PolicyDetailsAction, policyDetailsMiddlewareFactory } from './index';
import { policyConfig } from './selectors';
import { clone } from '../../models/policy_details_config';
import { factory as policyConfigFactory } from '../../../../../../common/endpoint/models/policy_config';
+import { PolicyData } from '../../../../../../common/endpoint/types';
+import {
+ createSpyMiddleware,
+ MiddlewareActionSpyHelper,
+} from '../../../../../common/store/test_utils';
+import {
+ AppContextTestRender,
+ createAppRootMockRenderer,
+} from '../../../../../common/mock/endpoint';
+import { HttpFetchOptions } from 'kibana/public';
describe('policy details: ', () => {
- let store: Store;
+ let store: Store;
let getState: typeof store['getState'];
let dispatch: Dispatch;
+ let policyItem: PolicyData;
- beforeEach(() => {
- store = createStore(policyDetailsReducer);
- getState = store.getState;
- dispatch = store.dispatch;
-
- dispatch({
- type: 'serverReturnedPolicyDetailsData',
- payload: {
- policyItem: {
- id: '',
- name: '',
- description: '',
- created_at: '',
- created_by: '',
- updated_at: '',
- updated_by: '',
- config_id: '',
+ const generateNewPolicyItemMock = (): PolicyData => {
+ return {
+ id: '',
+ name: '',
+ description: '',
+ created_at: '',
+ created_by: '',
+ updated_at: '',
+ updated_by: '',
+ config_id: '',
+ enabled: true,
+ output_id: '',
+ inputs: [
+ {
+ type: 'endpoint',
enabled: true,
- output_id: '',
- inputs: [
- {
- type: 'endpoint',
- enabled: true,
- streams: [],
- config: {
- artifact_manifest: {
- value: {
- manifest_version: 'WzAsMF0=',
- schema_version: 'v1',
- artifacts: {},
- },
- },
- policy: {
- value: policyConfigFactory(),
- },
+ streams: [],
+ config: {
+ artifact_manifest: {
+ value: {
+ manifest_version: 'WzAsMF0=',
+ schema_version: 'v1',
+ artifacts: {},
},
},
- ],
- namespace: '',
- package: {
- name: '',
- title: '',
- version: '',
+ policy: {
+ value: policyConfigFactory(),
+ },
},
- revision: 1,
},
+ ],
+ namespace: '',
+ package: {
+ name: '',
+ title: '',
+ version: '',
},
- });
+ revision: 1,
+ };
+ };
+
+ beforeEach(() => {
+ policyItem = generateNewPolicyItemMock();
});
- describe('when the user has enabled windows process events', () => {
+ describe('When interacting with policy form', () => {
beforeEach(() => {
- const config = policyConfig(getState());
- if (!config) {
- throw new Error();
- }
-
- const newPayload1 = clone(config);
- newPayload1.windows.events.process = true;
+ store = createStore(policyDetailsReducer);
+ getState = store.getState;
+ dispatch = store.dispatch;
dispatch({
- type: 'userChangedPolicyConfig',
- payload: { policyConfig: newPayload1 },
+ type: 'serverReturnedPolicyDetailsData',
+ payload: {
+ policyItem,
+ },
});
});
- it('windows process events is enabled', () => {
- const config = policyConfig(getState());
- expect(config!.windows.events.process).toEqual(true);
+ describe('when the user has enabled windows process events', () => {
+ beforeEach(() => {
+ const config = policyConfig(getState());
+ if (!config) {
+ throw new Error();
+ }
+
+ const newPayload1 = clone(config);
+ newPayload1.windows.events.process = true;
+
+ dispatch({
+ type: 'userChangedPolicyConfig',
+ payload: { policyConfig: newPayload1 },
+ });
+ });
+
+ it('windows process events is enabled', () => {
+ const config = policyConfig(getState());
+ expect(config!.windows.events.process).toEqual(true);
+ });
});
- });
- describe('when the user has enabled mac file events', () => {
- beforeEach(() => {
- const config = policyConfig(getState());
- if (!config) {
- throw new Error();
- }
+ describe('when the user has enabled mac file events', () => {
+ beforeEach(() => {
+ const config = policyConfig(getState());
+ if (!config) {
+ throw new Error();
+ }
- const newPayload1 = clone(config);
- newPayload1.mac.events.file = true;
+ const newPayload1 = clone(config);
+ newPayload1.mac.events.file = true;
- dispatch({
- type: 'userChangedPolicyConfig',
- payload: { policyConfig: newPayload1 },
+ dispatch({
+ type: 'userChangedPolicyConfig',
+ payload: { policyConfig: newPayload1 },
+ });
+ });
+
+ it('mac file events is enabled', () => {
+ const config = policyConfig(getState());
+ expect(config!.mac.events.file).toEqual(true);
});
});
- it('mac file events is enabled', () => {
- const config = policyConfig(getState());
- expect(config!.mac.events.file).toEqual(true);
+ describe('when the user has enabled linux process events', () => {
+ beforeEach(() => {
+ const config = policyConfig(getState());
+ if (!config) {
+ throw new Error();
+ }
+
+ const newPayload1 = clone(config);
+ newPayload1.linux.events.file = true;
+
+ dispatch({
+ type: 'userChangedPolicyConfig',
+ payload: { policyConfig: newPayload1 },
+ });
+ });
+
+ it('linux file events is enabled', () => {
+ const config = policyConfig(getState());
+ expect(config!.linux.events.file).toEqual(true);
+ });
});
});
- describe('when the user has enabled linux process events', () => {
+ describe('when saving policy data', () => {
+ let waitForAction: MiddlewareActionSpyHelper['waitForAction'];
+ let http: AppContextTestRender['coreStart']['http'];
+
beforeEach(() => {
- const config = policyConfig(getState());
- if (!config) {
- throw new Error();
- }
+ let actionSpyMiddleware: MiddlewareActionSpyHelper['actionSpyMiddleware'];
+ const { coreStart, depsStart } = createAppRootMockRenderer();
+ ({ actionSpyMiddleware, waitForAction } = createSpyMiddleware());
+ http = coreStart.http;
- const newPayload1 = clone(config);
- newPayload1.linux.events.file = true;
+ store = createStore(
+ policyDetailsReducer,
+ undefined,
+ applyMiddleware(policyDetailsMiddlewareFactory(coreStart, depsStart), actionSpyMiddleware)
+ );
+ getState = store.getState;
+ dispatch = store.dispatch;
dispatch({
- type: 'userChangedPolicyConfig',
- payload: { policyConfig: newPayload1 },
+ type: 'serverReturnedPolicyDetailsData',
+ payload: {
+ policyItem,
+ },
+ });
+ });
+
+    it('should handle HTTP 409 (version mismatch) and still save the policy', async () => {
+ policyItem.inputs[0].config.policy.value.windows.events.dns = false;
+
+ const http409Error: Error & { response?: { status: number } } = new Error('conflict');
+ http409Error.response = { status: 409 };
+
+ // The most current Policy Item. Differences to `artifact_manifest` should be preserved,
+ // while the policy data should be overwritten on next `put`.
+ const mostCurrentPolicyItem = generateNewPolicyItemMock();
+ mostCurrentPolicyItem.inputs[0].config.artifact_manifest.value.manifest_version = 'updated';
+ mostCurrentPolicyItem.inputs[0].config.policy.value.windows.events.dns = true;
+
+ http.put.mockRejectedValueOnce(http409Error);
+ http.get.mockResolvedValueOnce({
+ item: mostCurrentPolicyItem,
+ success: true,
+ });
+ http.put.mockResolvedValueOnce({
+ item: policyItem,
+ success: true,
+ });
+
+ dispatch({ type: 'userClickedPolicyDetailsSaveButton' });
+ await waitForAction('serverReturnedUpdatedPolicyDetailsData');
+
+ expect(http.put).toHaveBeenCalledTimes(2);
+
+ const lastPutCallPayload = ((http.put.mock.calls[
+ http.put.mock.calls.length - 1
+ ] as unknown) as [string, HttpFetchOptions])[1];
+
+ expect(JSON.parse(lastPutCallPayload.body as string)).toEqual({
+ name: '',
+ description: '',
+ config_id: '',
+ enabled: true,
+ output_id: '',
+ inputs: [
+ {
+ type: 'endpoint',
+ enabled: true,
+ streams: [],
+ config: {
+ artifact_manifest: {
+ value: { manifest_version: 'updated', schema_version: 'v1', artifacts: {} },
+ },
+ policy: {
+ value: {
+ windows: {
+ events: {
+ dll_and_driver_load: true,
+ dns: false,
+ file: true,
+ network: true,
+ process: true,
+ registry: true,
+ security: true,
+ },
+ malware: { mode: 'prevent' },
+ logging: { file: 'info' },
+ },
+ mac: {
+ events: { process: true, file: true, network: true },
+ malware: { mode: 'prevent' },
+ logging: { file: 'info' },
+ },
+ linux: {
+ events: { process: true, file: true, network: true },
+ logging: { file: 'info' },
+ },
+ },
+ },
+ },
+ },
+ ],
+ namespace: '',
+ package: { name: '', title: '', version: '' },
});
});
- it('linux file events is enabled', () => {
- const config = policyConfig(getState());
- expect(config!.linux.events.file).toEqual(true);
+ it('should not attempt to handle other HTTP errors', async () => {
+ const http400Error: Error & { response?: { status: number } } = new Error('not found');
+
+ http400Error.response = { status: 400 };
+ http.put.mockRejectedValueOnce(http400Error);
+ dispatch({ type: 'userClickedPolicyDetailsSaveButton' });
+
+ const failureAction = await waitForAction('serverReturnedPolicyDetailsUpdateFailure');
+ expect(failureAction.payload?.error).toBeInstanceOf(Error);
+ expect(failureAction.payload?.error?.message).toEqual('not found');
});
});
});
diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts
index cfa1a478619b7..1d9e3c2198b28 100644
--- a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts
+++ b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/middleware.ts
@@ -4,12 +4,14 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import { IHttpFetchError } from 'kibana/public';
import { PolicyDetailsState, UpdatePolicyResponse } from '../../types';
import {
policyIdFromParams,
isOnPolicyDetailsPage,
policyDetails,
policyDetailsForUpdate,
+ getPolicyDataForUpdate,
} from './selectors';
import {
sendGetPackageConfig,
@@ -66,7 +68,27 @@ export const policyDetailsMiddlewareFactory: ImmutableMiddlewareFactory {
+ if (!error.response || error.response.status !== 409) {
+ return Promise.reject(error);
+ }
+ // Handle 409 error (version conflict) here, by using the latest document
+ // for the package config and adding the updated policy to it, ensuring that
+ // any recent updates to `manifest_artifacts` are retained.
+ return sendGetPackageConfig(http, id).then((packageConfig) => {
+ const latestUpdatedPolicyItem = packageConfig.item;
+ latestUpdatedPolicyItem.inputs[0].config.policy =
+ updatedPolicyItem.inputs[0].config.policy;
+
+ return sendPutPackageConfig(
+ http,
+ id,
+ getPolicyDataForUpdate(latestUpdatedPolicyItem) as NewPolicyData
+ );
+ });
+ }
+ );
} catch (error) {
dispatch({
type: 'serverReturnedPolicyDetailsUpdateFailure',
diff --git a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/selectors.ts b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/selectors.ts
index d2a5c1b7e14a3..cce0adf36bcce 100644
--- a/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/selectors.ts
+++ b/x-pack/plugins/security_solution/public/management/pages/policy/store/policy_details/selectors.ts
@@ -11,6 +11,7 @@ import {
Immutable,
NewPolicyData,
PolicyConfig,
+ PolicyData,
UIPolicyConfig,
} from '../../../../../../common/endpoint/types';
import { factory as policyConfigFactory } from '../../../../../../common/endpoint/models/policy_config';
@@ -20,6 +21,18 @@ import { ManagementRoutePolicyDetailsParams } from '../../../../types';
/** Returns the policy details */
export const policyDetails = (state: Immutable) => state.policyItem;
+/**
+ * Given a Policy Data (package config) object, return back a new object with only the field
+ * needed for an Update/Create API action
+ * @param policy
+ */
+export const getPolicyDataForUpdate = (
+ policy: PolicyData | Immutable
+): NewPolicyData | Immutable => {
+ const { id, revision, created_by, created_at, updated_by, updated_at, ...newPolicy } = policy;
+ return newPolicy;
+};
+
/**
* Return only the policy structure accepted for update/create
*/
@@ -27,8 +40,7 @@ export const policyDetailsForUpdate: (
state: Immutable
) => Immutable | undefined = createSelector(policyDetails, (policy) => {
if (policy) {
- const { id, revision, created_by, created_at, updated_by, updated_at, ...newPolicy } = policy;
- return newPolicy;
+ return getPolicyDataForUpdate(policy);
}
});
diff --git a/x-pack/plugins/security_solution/public/overview/components/overview_host/index.test.tsx b/x-pack/plugins/security_solution/public/overview/components/overview_host/index.test.tsx
index bb9fd73d2df8e..d019a480a8045 100644
--- a/x-pack/plugins/security_solution/public/overview/components/overview_host/index.test.tsx
+++ b/x-pack/plugins/security_solution/public/overview/components/overview_host/index.test.tsx
@@ -58,9 +58,9 @@ const mockOpenTimelineQueryResults: MockedProvidedQuery[] = [
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
- 'logs-*',
],
inspect: false,
},
diff --git a/x-pack/plugins/security_solution/public/overview/components/overview_network/index.test.tsx b/x-pack/plugins/security_solution/public/overview/components/overview_network/index.test.tsx
index 0f6fce1486ee7..c7f7c4f4af254 100644
--- a/x-pack/plugins/security_solution/public/overview/components/overview_network/index.test.tsx
+++ b/x-pack/plugins/security_solution/public/overview/components/overview_network/index.test.tsx
@@ -73,9 +73,9 @@ const mockOpenTimelineQueryResults: MockedProvidedQuery[] = [
'auditbeat-*',
'endgame-*',
'filebeat-*',
+ 'logs-*',
'packetbeat-*',
'winlogbeat-*',
- 'logs-*',
],
inspect: false,
},
diff --git a/x-pack/plugins/security_solution/public/resolver/models/resolver_tree.ts b/x-pack/plugins/security_solution/public/resolver/models/resolver_tree.ts
index cf32988a856b2..446e371832d38 100644
--- a/x-pack/plugins/security_solution/public/resolver/models/resolver_tree.ts
+++ b/x-pack/plugins/security_solution/public/resolver/models/resolver_tree.ts
@@ -9,6 +9,7 @@ import {
ResolverEvent,
ResolverNodeStats,
ResolverLifecycleNode,
+ ResolverChildNode,
} from '../../../common/endpoint/types';
import { uniquePidForProcess } from './process_event';
@@ -60,11 +61,13 @@ export function relatedEventsStats(tree: ResolverTree): Map {
let store: Store;
+ let dispatchTree: (tree: ResolverTree) => void;
beforeEach(() => {
store = createStore(dataReducer, undefined);
+ dispatchTree = (tree) => {
+ const action: DataAction = {
+ type: 'serverReturnedResolverData',
+ payload: {
+ result: tree,
+ databaseDocumentID: '',
+ },
+ };
+ store.dispatch(action);
+ };
});
describe('when data was received and the ancestry and children edges had cursors', () => {
beforeEach(() => {
- const generator = new EndpointDocGenerator('seed');
+ // Generate a 'tree' using the Resolver generator code. This structure isn't the same as what the API returns.
+ const baseTree = generateBaseTree();
const tree = mockResolverTree({
- events: generator.generateTree({ ancestors: 1, generations: 2, children: 2 }).allEvents,
+ events: baseTree.allEvents,
cursors: {
- childrenNextChild: 'aValidChildursor',
+ childrenNextChild: 'aValidChildCursor',
ancestryNextAncestor: 'aValidAncestorCursor',
},
- });
- if (tree) {
- const action: DataAction = {
- type: 'serverReturnedResolverData',
- payload: {
- result: tree,
- databaseDocumentID: '',
- },
- };
- store.dispatch(action);
- }
+ })!;
+ dispatchTree(tree);
});
it('should indicate there are additional ancestor', () => {
expect(selectors.hasMoreAncestors(store.getState())).toBe(true);
@@ -49,4 +53,251 @@ describe('Resolver Data Middleware', () => {
expect(selectors.hasMoreChildren(store.getState())).toBe(true);
});
});
+
+ describe('when data was received with stats mocked for the first child node', () => {
+ let firstChildNodeInTree: TreeNode;
+ let eventStatsForFirstChildNode: { total: number; byCategory: Record };
+ let categoryToOverCount: string;
+ let tree: ResolverTree;
+
+ /**
+ * Compiling stats to use for checking limit warnings and counts of missing events
+ * e.g. Limit warnings should show when number of related events actually displayed
+ * is lower than the estimated count from stats.
+ */
+
+ beforeEach(() => {
+ ({
+ tree,
+ firstChildNodeInTree,
+ eventStatsForFirstChildNode,
+ categoryToOverCount,
+ } = mockedTree());
+ if (tree) {
+ dispatchTree(tree);
+ }
+ });
+
+ describe('and when related events were returned with totals equalling what stat counts indicate they should be', () => {
+ beforeEach(() => {
+ // Return related events for the first child node
+ const relatedAction: DataAction = {
+ type: 'serverReturnedRelatedEventData',
+ payload: {
+ entityID: firstChildNodeInTree.id,
+ events: firstChildNodeInTree.relatedEvents,
+ nextEvent: null,
+ },
+ };
+ store.dispatch(relatedAction);
+ });
+ it('should have the correct related events', () => {
+ const selectedEventsByEntityId = selectors.relatedEventsByEntityId(store.getState());
+ const selectedEventsForFirstChildNode = selectedEventsByEntityId.get(
+ firstChildNodeInTree.id
+ )!.events;
+
+ expect(selectedEventsForFirstChildNode).toBe(firstChildNodeInTree.relatedEvents);
+ });
+ it('should indicate the correct related event count for each category', () => {
+ const selectedRelatedInfo = selectors.relatedEventInfoByEntityId(store.getState());
+ const displayCountsForCategory = selectedRelatedInfo(firstChildNodeInTree.id)
+ ?.numberActuallyDisplayedForCategory!;
+
+ const eventCategoriesForNode: string[] = Object.keys(
+ eventStatsForFirstChildNode.byCategory
+ );
+
+ for (const eventCategory of eventCategoriesForNode) {
+ expect(`${eventCategory}:${displayCountsForCategory(eventCategory)}`).toBe(
+ `${eventCategory}:${eventStatsForFirstChildNode.byCategory[eventCategory]}`
+ );
+ }
+ });
+ /**
+ * The general approach reflected here is to _avoid_ showing a limit warning - even if we hit
+ * the overall related event limit - as long as the number in our category matches what the stats
+ * say we have. E.g. If the stats say you have 20 dns events, and we receive 20 dns events, we
+ * don't need to display a limit warning for that, even if we hit some overall event limit of e.g. 100
+ * while we were fetching the 20.
+ */
+ it('should not indicate the limit has been exceeded because the number of related events received for the category is greater or equal to the stats count', () => {
+ const selectedRelatedInfo = selectors.relatedEventInfoByEntityId(store.getState());
+ const shouldShowLimit = selectedRelatedInfo(firstChildNodeInTree.id)
+ ?.shouldShowLimitForCategory!;
+ for (const typeCounted of Object.keys(eventStatsForFirstChildNode.byCategory)) {
+ expect(shouldShowLimit(typeCounted)).toBe(false);
+ }
+ });
+ it('should not indicate that there are any related events missing because the number of related events received for the category is greater or equal to the stats count', () => {
+ const selectedRelatedInfo = selectors.relatedEventInfoByEntityId(store.getState());
+ const notDisplayed = selectedRelatedInfo(firstChildNodeInTree.id)
+ ?.numberNotDisplayedForCategory!;
+ for (const typeCounted of Object.keys(eventStatsForFirstChildNode.byCategory)) {
+ expect(notDisplayed(typeCounted)).toBe(0);
+ }
+ });
+ });
+ describe('when data was received and stats show more related events than the API can provide', () => {
+ beforeEach(() => {
+ // Add 1 to the stats for an event category so that the selectors think we are missing data.
+ // This mutates `tree`, and then we re-dispatch it
+ eventStatsForFirstChildNode.byCategory[categoryToOverCount] =
+ eventStatsForFirstChildNode.byCategory[categoryToOverCount] + 1;
+
+ if (tree) {
+ dispatchTree(tree);
+ const relatedAction: DataAction = {
+ type: 'serverReturnedRelatedEventData',
+ payload: {
+ entityID: firstChildNodeInTree.id,
+ events: firstChildNodeInTree.relatedEvents,
+ nextEvent: 'aValidNextEventCursor',
+ },
+ };
+ store.dispatch(relatedAction);
+ }
+ });
+ it('should have the correct related events', () => {
+ const selectedEventsByEntityId = selectors.relatedEventsByEntityId(store.getState());
+ const selectedEventsForFirstChildNode = selectedEventsByEntityId.get(
+ firstChildNodeInTree.id
+ )!.events;
+
+ expect(selectedEventsForFirstChildNode).toBe(firstChildNodeInTree.relatedEvents);
+ });
+ it('should indicate the limit has been exceeded because the number of related events received for the category is less than what the stats count said it would be', () => {
+ const selectedRelatedInfo = selectors.relatedEventInfoByEntityId(store.getState());
+ const shouldShowLimit = selectedRelatedInfo(firstChildNodeInTree.id)
+ ?.shouldShowLimitForCategory!;
+ expect(shouldShowLimit(categoryToOverCount)).toBe(true);
+ });
+ it('should indicate that there are related events missing because the number of related events received for the category is less than what the stats count said it would be', () => {
+ const selectedRelatedInfo = selectors.relatedEventInfoByEntityId(store.getState());
+ const notDisplayed = selectedRelatedInfo(firstChildNodeInTree.id)
+ ?.numberNotDisplayedForCategory!;
+ expect(notDisplayed(categoryToOverCount)).toBe(1);
+ });
+ });
+ });
});
+
+function mockedTree() {
+ // Generate a 'tree' using the Resolver generator code. This structure isn't the same as what the API returns.
+ const baseTree = generateBaseTree();
+
+ const { children } = baseTree;
+ const firstChildNodeInTree = [...children.values()][0];
+
+ // The `generateBaseTree` mock doesn't calculate stats (the actual data has them.)
+ // So calculate some stats for just the node that we'll test.
+ const statsResults = compileStatsForChild(firstChildNodeInTree);
+
+ const tree = mockResolverTree({
+ events: baseTree.allEvents,
+ /**
+ * Calculate children from the ResolverTree response using the children of the `Tree` we generated using the Resolver data generator code.
+ * Compile (and attach) stats to the first child node.
+ *
+ * The purpose of `children` here is to set the `actual`
+ * value that the stats values will be compared with
+ * to derive things like the number of missing events and if
+ * related event limits should be shown.
+ */
+ children: [...baseTree.children.values()].map((node: TreeNode) => {
+ // Treat each `TreeNode` as a `ResolverChildNode`.
+ // These types are almost close enough to be used interchangeably (for the purposes of this test.)
+ const childNode: Partial = node;
+
+ // `TreeNode` has `id` which is the same as `entityID`.
+ // The `ResolverChildNode` refers to the entity ID as `entityID`.
+ // Set `entityID` on `childNode` since the code in test relies on it.
+ childNode.entityID = (childNode as TreeNode).id;
+
+ // This should only be true for the first child.
+ if (node.id === firstChildNodeInTree.id) {
+ // attach stats
+ childNode.stats = {
+ events: statsResults.eventStats,
+ totalAlerts: 0,
+ };
+ }
+ return childNode;
+ }) as ResolverChildNode[] /**
+ Cast to ResolverChildNode[] array is needed because incoming
+ TreeNodes from the generator cannot be assigned cleanly to the
+ tree model's expected ResolverChildNode type.
+ */,
+ });
+
+ return {
+ tree: tree!,
+ firstChildNodeInTree,
+ eventStatsForFirstChildNode: statsResults.eventStats,
+ categoryToOverCount: statsResults.firstCategory,
+ };
+}
+
+function generateBaseTree() {
+ const generator = new EndpointDocGenerator('seed');
+ return generator.generateTree({
+ ancestors: 1,
+ generations: 2,
+ children: 3,
+ percentWithRelated: 100,
+ alwaysGenMaxChildrenPerNode: true,
+ });
+}
+
+function compileStatsForChild(
+ node: TreeNode
+): {
+ eventStats: {
+ /** The total number of related events. */
+ total: number;
+ /** A record with the categories of events as keys, and the count of events per category as values. */
+ byCategory: Record;
+ };
+ /** The category of the first event. */
+ firstCategory: string;
+} {
+ const totalRelatedEvents = node.relatedEvents.length;
+ // For the purposes of testing, we pick one category to fake an extra event for
+ // so we can test if the event limit selectors do the right thing.
+
+ let firstCategory: string | undefined;
+
+ const compiledStats = node.relatedEvents.reduce(
+ (counts: Record, relatedEvent) => {
+ // `relatedEvent.event.category` is `string | string[]`.
+ // Wrap it in an array and flatten that array to get a `string[] | [string]`
+ // which we can loop over.
+ const categories: string[] = [relatedEvent.event.category].flat();
+
+ for (const category of categories) {
+ // Set the first category as 'categoryToOverCount'
+ if (firstCategory === undefined) {
+ firstCategory = category;
+ }
+
+ // Increment the count of events with this category
+ counts[category] = counts[category] ? counts[category] + 1 : 1;
+ }
+ return counts;
+ },
+ {}
+ );
+ if (firstCategory === undefined) {
+ throw new Error('there were no related events for the node.');
+ }
+ return {
+ /**
+ * Object to use for the first child node's stats `events` object.
+ */
+ eventStats: {
+ total: totalRelatedEvents,
+ byCategory: compiledStats,
+ },
+ firstCategory,
+ };
+}
diff --git a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts
index 9c47c765457e3..990b911e5dbd0 100644
--- a/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts
+++ b/x-pack/plugins/security_solution/public/resolver/store/data/selectors.ts
@@ -5,7 +5,7 @@
*/
import rbush from 'rbush';
-import { createSelector } from 'reselect';
+import { createSelector, defaultMemoize } from 'reselect';
import {
DataState,
AdjacentProcessMap,
@@ -32,6 +32,7 @@ import {
} from '../../../../common/endpoint/types';
import * as resolverTreeModel from '../../models/resolver_tree';
import { isometricTaxiLayout } from '../../models/indexed_process_tree/isometric_taxi_layout';
+import { allEventCategories } from '../../../../common/endpoint/models/event';
/**
* If there is currently a request.
@@ -167,6 +168,116 @@ export function hasMoreAncestors(state: DataState): boolean {
return tree ? resolverTreeModel.hasMoreAncestors(tree) : false;
}
+interface RelatedInfoFunctions {
+ shouldShowLimitForCategory: (category: string) => boolean;
+ numberNotDisplayedForCategory: (category: string) => number;
+ numberActuallyDisplayedForCategory: (category: string) => number;
+}
+/**
+ * A map of `entity_id`s to functions that provide information about
+ * related events by ECS `.category` Primarily to avoid having business logic
+ * in UI components.
+ */
+export const relatedEventInfoByEntityId: (
+ state: DataState
+) => (entityID: string) => RelatedInfoFunctions | null = createSelector(
+ relatedEventsByEntityId,
+ relatedEventsStats,
+ function selectLineageLimitInfo(
+ /* eslint-disable no-shadow */
+ relatedEventsByEntityId,
+ relatedEventsStats
+ /* eslint-enable no-shadow */
+ ) {
+ if (!relatedEventsStats) {
+ // If there are no related event stats, there are no related event info objects
+ return (entityId: string) => null;
+ }
+ return (entityId) => {
+ const stats = relatedEventsStats.get(entityId);
+ if (!stats) {
+ return null;
+ }
+ const eventsResponseForThisEntry = relatedEventsByEntityId.get(entityId);
+ const hasMoreEvents =
+ eventsResponseForThisEntry && eventsResponseForThisEntry.nextEvent !== null;
+ /**
+ * Get the "aggregate" total for the event category (i.e. _all_ events that would qualify as being "in category")
+ * For a set like `[DNS,File][File,DNS][Registry]` The first and second events would contribute to the aggregate total for DNS being 2.
+ * This is currently aligned with how the backend provides this information.
+ *
+ * @param eventCategory {string} The ECS category like 'file','dns',etc.
+ */
+ const aggregateTotalForCategory = (eventCategory: string): number => {
+ return stats.events.byCategory[eventCategory] || 0;
+ };
+
+ /**
+ * Get all the related events in the category provided.
+ *
+ * @param eventCategory {string} The ECS category like 'file','dns',etc.
+ */
+ const unmemoizedMatchingEventsForCategory = (eventCategory: string): ResolverEvent[] => {
+ if (!eventsResponseForThisEntry) {
+ return [];
+ }
+ return eventsResponseForThisEntry.events.filter((resolverEvent) => {
+ for (const category of [allEventCategories(resolverEvent)].flat()) {
+ if (category === eventCategory) {
+ return true;
+ }
+ }
+ return false;
+ });
+ };
+
+ const matchingEventsForCategory = defaultMemoize(unmemoizedMatchingEventsForCategory);
+
+ /**
+ * The number of events that occurred before the API limit was reached.
+ * The number of events that came back from the API that have `eventCategory` in their list of categories.
+ *
+ * @param eventCategory {string} The ECS category like 'file','dns',etc.
+ */
+ const numberActuallyDisplayedForCategory = (eventCategory: string): number => {
+ return matchingEventsForCategory(eventCategory)?.length || 0;
+ };
+
+ /**
+ * The total number counted by the backend - the number displayed
+ *
+ * @param eventCategory {string} The ECS category like 'file','dns',etc.
+ */
+ const numberNotDisplayedForCategory = (eventCategory: string): number => {
+ return (
+ aggregateTotalForCategory(eventCategory) -
+ numberActuallyDisplayedForCategory(eventCategory)
+ );
+ };
+
+ /**
+ * `true` when the `nextEvent` cursor appeared in the results and we are short on the number needed to
+ * fulfill the aggregate count.
+ *
+ * @param eventCategory {string} The ECS category like 'file','dns',etc.
+ */
+ const shouldShowLimitForCategory = (eventCategory: string): boolean => {
+ if (hasMoreEvents && numberNotDisplayedForCategory(eventCategory) > 0) {
+ return true;
+ }
+ return false;
+ };
+
+ const entryValue = {
+ shouldShowLimitForCategory,
+ numberNotDisplayedForCategory,
+ numberActuallyDisplayedForCategory,
+ };
+ return entryValue;
+ };
+ }
+);
+
/**
* If we need to fetch, this is the ID to fetch.
*/
@@ -285,6 +396,7 @@ export const visibleProcessNodePositionsAndEdgeLineSegments = createSelector(
};
}
);
+
/**
* If there is a pending request that's for a entity ID that doesn't matche the `entityID`, then we should cancel it.
*/
diff --git a/x-pack/plugins/security_solution/public/resolver/store/selectors.ts b/x-pack/plugins/security_solution/public/resolver/store/selectors.ts
index 2bc254d118d33..6e512cfe13f62 100644
--- a/x-pack/plugins/security_solution/public/resolver/store/selectors.ts
+++ b/x-pack/plugins/security_solution/public/resolver/store/selectors.ts
@@ -103,6 +103,16 @@ export const relatedEventsReady = composeSelectors(
dataSelectors.relatedEventsReady
);
+/**
+ * Business logic lookup functions by ECS category by entity id.
+ * Example usage:
+ * const numberOfFileEvents = infoByEntityId.get(`someEntityId`)?.getAggregateTotalForCategory(`file`);
+ */
+export const relatedEventInfoByEntityId = composeSelectors(
+ dataStateSelector,
+ dataSelectors.relatedEventInfoByEntityId
+);
+
/**
* Returns the id of the "current" tree node (fake-focused)
*/
@@ -158,6 +168,16 @@ export const isLoading = composeSelectors(dataStateSelector, dataSelectors.isLoa
*/
export const hasError = composeSelectors(dataStateSelector, dataSelectors.hasError);
+/**
+ * True if the children cursor is not null
+ */
+export const hasMoreChildren = composeSelectors(dataStateSelector, dataSelectors.hasMoreChildren);
+
+/**
+ * True if the ancestor cursor is not null
+ */
+export const hasMoreAncestors = composeSelectors(dataStateSelector, dataSelectors.hasMoreAncestors);
+
/**
* An array containing all the processes currently in the Resolver than can be graphed
*/
diff --git a/x-pack/plugins/security_solution/public/resolver/view/limit_warnings.tsx b/x-pack/plugins/security_solution/public/resolver/view/limit_warnings.tsx
new file mode 100644
index 0000000000000..e3bad8ee2e574
--- /dev/null
+++ b/x-pack/plugins/security_solution/public/resolver/view/limit_warnings.tsx
@@ -0,0 +1,126 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import React from 'react';
+import { EuiCallOut } from '@elastic/eui';
+import { FormattedMessage } from 'react-intl';
+
+const lineageLimitMessage = (
+ <>
+
+ >
+);
+
+const LineageTitleMessage = React.memo(function LineageTitleMessage({
+ numberOfEntries,
+}: {
+ numberOfEntries: number;
+}) {
+ return (
+ <>
+
+ >
+ );
+});
+
+const RelatedEventsLimitMessage = React.memo(function RelatedEventsLimitMessage({
+ category,
+ numberOfEventsMissing,
+}: {
+ numberOfEventsMissing: number;
+ category: string;
+}) {
+ return (
+ <>
+
+ >
+ );
+});
+
+const RelatedLimitTitleMessage = React.memo(function RelatedLimitTitleMessage({
+ category,
+ numberOfEventsDisplayed,
+}: {
+ numberOfEventsDisplayed: number;
+ category: string;
+}) {
+ return (
+ <>
+
+ >
+ );
+});
+
+/**
+ * Limit warning for hitting the /events API limit
+ */
+export const RelatedEventLimitWarning = React.memo(function RelatedEventLimitWarning({
+ className,
+ eventType,
+ numberActuallyDisplayed,
+ numberMissing,
+}: {
+ className?: string;
+ eventType: string;
+ numberActuallyDisplayed: number;
+ numberMissing: number;
+}) {
+ /**
+ * Based on API limits, not all related events may be displayed.
+ */
+ return (
+
+ }
+ >
+
+
+
+
+ );
+});
+
+/**
+ * Limit warning for hitting a limit of nodes in the tree
+ */
+export const LimitWarning = React.memo(function LimitWarning({
+ className,
+ numberDisplayed,
+}: {
+ className?: string;
+ numberDisplayed: number;
+}) {
+ return (
+ }
+ >
+ {lineageLimitMessage}
+
+ );
+});
diff --git a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_detail.tsx b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_detail.tsx
index 3127c7132df3d..5d90cd11d31af 100644
--- a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_detail.tsx
+++ b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_detail.tsx
@@ -31,7 +31,7 @@ import { useResolverTheme } from '../assets';
const StyledDescriptionList = styled(EuiDescriptionList)`
&.euiDescriptionList.euiDescriptionList--column dt.euiDescriptionList__title.desc-title {
- max-width: 8em;
+ max-width: 10em;
}
`;
@@ -56,73 +56,42 @@ export const ProcessDetails = memo(function ProcessDetails({
const dateTime = eventTime ? formatDate(eventTime) : '';
const createdEntry = {
- title: i18n.translate(
- 'xpack.securitySolution.endpoint.resolver.panel.processDescList.created',
- {
- defaultMessage: 'Created',
- }
- ),
+ title: '@timestamp',
description: dateTime,
};
const pathEntry = {
- title: i18n.translate('xpack.securitySolution.endpoint.resolver.panel.processDescList.path', {
- defaultMessage: 'Path',
- }),
+ title: 'process.executable',
description: processPath(processEvent),
};
const pidEntry = {
- title: i18n.translate('xpack.securitySolution.endpoint.resolver.panel.processDescList.pid', {
- defaultMessage: 'PID',
- }),
+ title: 'process.pid',
description: processPid(processEvent),
};
const userEntry = {
- title: i18n.translate('xpack.securitySolution.endpoint.resolver.panel.processDescList.user', {
- defaultMessage: 'User',
- }),
+ title: 'user.name',
description: (userInfoForProcess(processEvent) as { name: string }).name,
};
const domainEntry = {
- title: i18n.translate(
- 'xpack.securitySolution.endpoint.resolver.panel.processDescList.domain',
- {
- defaultMessage: 'Domain',
- }
- ),
+ title: 'user.domain',
description: (userInfoForProcess(processEvent) as { domain: string }).domain,
};
const parentPidEntry = {
- title: i18n.translate(
- 'xpack.securitySolution.endpoint.resolver.panel.processDescList.parentPid',
- {
- defaultMessage: 'Parent PID',
- }
- ),
+ title: 'process.parent.pid',
description: processParentPid(processEvent),
};
const md5Entry = {
- title: i18n.translate(
- 'xpack.securitySolution.endpoint.resolver.panel.processDescList.md5hash',
- {
- defaultMessage: 'MD5',
- }
- ),
+ title: 'process.hash.md5',
description: md5HashForProcess(processEvent),
};
const commandLineEntry = {
- title: i18n.translate(
- 'xpack.securitySolution.endpoint.resolver.panel.processDescList.commandLine',
- {
- defaultMessage: 'Command Line',
- }
- ),
+ title: 'process.args',
description: argsForProcess(processEvent),
};
diff --git a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_list.tsx b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_list.tsx
index 9152649c07abf..0ed677885775f 100644
--- a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_list.tsx
+++ b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_process_list.tsx
@@ -13,6 +13,7 @@ import {
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { useSelector } from 'react-redux';
+import styled from 'styled-components';
import * as event from '../../../../common/endpoint/models/event';
import * as selectors from '../../store/selectors';
import { CrumbInfo, formatter, StyledBreadcrumbs } from './panel_content_utilities';
@@ -20,6 +21,27 @@ import { useResolverDispatch } from '../use_resolver_dispatch';
import { SideEffectContext } from '../side_effect_context';
import { CubeForProcess } from './process_cube_icon';
import { ResolverEvent } from '../../../../common/endpoint/types';
+import { LimitWarning } from '../limit_warnings';
+
+const StyledLimitWarning = styled(LimitWarning)`
+ flex-flow: row wrap;
+ display: block;
+ align-items: baseline;
+ margin-top: 1em;
+
+ & .euiCallOutHeader {
+ display: inline;
+ margin-right: 0.25em;
+ }
+
+ & .euiText {
+ display: inline;
+ }
+
+ & .euiText p {
+ display: inline;
+ }
+`;
/**
* The "default" view for the panel: A list of all the processes currently in the graph.
@@ -145,6 +167,7 @@ export const ProcessListWithCounts = memo(function ProcessListWithCounts({
}),
[processNodePositions]
);
+ const numberOfProcesses = processTableView.length;
const crumbs = useMemo(() => {
return [
@@ -160,9 +183,13 @@ export const ProcessListWithCounts = memo(function ProcessListWithCounts({
];
}, []);
+ const children = useSelector(selectors.hasMoreChildren);
+ const ancestors = useSelector(selectors.hasMoreAncestors);
+ const showWarning = children === true || ancestors === true;
return (
<>
+ {showWarning && }
items={processTableView} columns={columns} sorting />
>
diff --git a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_related_detail.tsx b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_related_detail.tsx
index f27ec56fef697..4544381d94955 100644
--- a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_related_detail.tsx
+++ b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_related_detail.tsx
@@ -10,7 +10,13 @@ import { EuiSpacer, EuiText, EuiDescriptionList, EuiTextColor, EuiTitle } from '
import styled from 'styled-components';
import { useSelector } from 'react-redux';
import { FormattedMessage } from 'react-intl';
-import { CrumbInfo, formatDate, StyledBreadcrumbs, BoldCode } from './panel_content_utilities';
+import {
+ CrumbInfo,
+ formatDate,
+ StyledBreadcrumbs,
+ BoldCode,
+ StyledTime,
+} from './panel_content_utilities';
import * as event from '../../../../common/endpoint/models/event';
import { ResolverEvent } from '../../../../common/endpoint/types';
import * as selectors from '../../store/selectors';
@@ -308,7 +314,7 @@ export const RelatedEventDetail = memo(function RelatedEventDetail({
return (
<>
-
+
@@ -321,11 +327,13 @@ export const RelatedEventDetail = memo(function RelatedEventDetail({
defaultMessage="{category} {eventType}"
/>
-
+
+
+
@@ -340,14 +348,15 @@ export const RelatedEventDetail = memo(function RelatedEventDetail({
return (
{index === 0 ? null : }
-
-
+
+
{sectionTitle}
+
void;
}
+const StyledRelatedLimitWarning = styled(RelatedEventLimitWarning)`
+ flex-flow: row wrap;
+ display: block;
+ align-items: baseline;
+ margin-top: 1em;
+
+ & .euiCallOutHeader {
+ display: inline;
+ margin-right: 0.25em;
+ }
+
+ & .euiText {
+ display: inline;
+ }
+
+ & .euiText p {
+ display: inline;
+ }
+`;
+
const DisplayList = memo(function DisplayList({
crumbs,
matchingEventEntries,
+ eventType,
+ processEntityId,
}: {
crumbs: Array<{ text: string | JSX.Element; onClick: () => void }>;
matchingEventEntries: MatchingEventEntry[];
+ eventType: string;
+ processEntityId: string;
}) {
+ const relatedLookupsByCategory = useSelector(selectors.relatedEventInfoByEntityId);
+ const lookupsForThisNode = relatedLookupsByCategory(processEntityId);
+ const shouldShowLimitWarning = lookupsForThisNode?.shouldShowLimitForCategory(eventType);
+ const numberDisplayed = lookupsForThisNode?.numberActuallyDisplayedForCategory(eventType);
+ const numberMissing = lookupsForThisNode?.numberNotDisplayedForCategory(eventType);
+
return (
<>
+ {shouldShowLimitWarning && typeof numberDisplayed !== 'undefined' && numberMissing ? (
+
+ ) : null}
<>
{matchingEventEntries.map((eventView, index) => {
@@ -61,11 +106,13 @@ const DisplayList = memo(function DisplayList({
defaultMessage="{category} {eventType}"
/>
-
+
+
+
@@ -242,6 +289,13 @@ export const ProcessEventListNarrowedByType = memo(function ProcessEventListNarr
);
}
- return ;
+ return (
+
+ );
});
ProcessEventListNarrowedByType.displayName = 'ProcessEventListNarrowedByType';
diff --git a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_utilities.tsx b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_utilities.tsx
index be0ba04c53233..374c4c94c7768 100644
--- a/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_utilities.tsx
+++ b/x-pack/plugins/security_solution/public/resolver/view/panels/panel_content_utilities.tsx
@@ -1,114 +1,122 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { i18n } from '@kbn/i18n';
-import { EuiBreadcrumbs, EuiBreadcrumb, EuiCode, EuiBetaBadge } from '@elastic/eui';
-import styled from 'styled-components';
-import React, { memo } from 'react';
-import { useResolverTheme } from '../assets';
-
-/**
- * A bold version of EuiCode to display certain titles with
- */
-export const BoldCode = styled(EuiCode)`
- &.euiCodeBlock code.euiCodeBlock__code {
- font-weight: 900;
- }
-`;
-
-const BetaHeader = styled(`header`)`
- margin-bottom: 1em;
-`;
-
-/**
- * The two query parameters we read/write on to control which view the table presents:
- */
-export interface CrumbInfo {
- readonly crumbId: string;
- readonly crumbEvent: string;
-}
-
-const ThemedBreadcrumbs = styled(EuiBreadcrumbs)<{ background: string; text: string }>`
- &.euiBreadcrumbs.euiBreadcrumbs--responsive {
- background-color: ${(props) => props.background};
- color: ${(props) => props.text};
- padding: 1em;
- border-radius: 5px;
- }
-
- & .euiBreadcrumbSeparator {
- background: ${(props) => props.text};
- }
-`;
-
-const betaBadgeLabel = i18n.translate(
- 'xpack.securitySolution.enpdoint.resolver.panelutils.betaBadgeLabel',
- {
- defaultMessage: 'BETA',
- }
-);
-
-/**
- * Breadcrumb menu with adjustments per direction from UX team
- */
-export const StyledBreadcrumbs = memo(function StyledBreadcrumbs({
- breadcrumbs,
- truncate,
-}: {
- breadcrumbs: EuiBreadcrumb[];
- truncate?: boolean;
-}) {
- const {
- colorMap: { resolverBreadcrumbBackground, resolverEdgeText },
- } = useResolverTheme();
- return (
- <>
-
-
-
-
- >
- );
-});
-
-/**
- * Long formatter (to second) for DateTime
- */
-export const formatter = new Intl.DateTimeFormat(i18n.getLocale(), {
- year: 'numeric',
- month: '2-digit',
- day: '2-digit',
- hour: '2-digit',
- minute: '2-digit',
- second: '2-digit',
-});
-
-const invalidDateText = i18n.translate(
- 'xpack.securitySolution.enpdoint.resolver.panelutils.invaliddate',
- {
- defaultMessage: 'Invalid Date',
- }
-);
-/**
- * @returns {string} A nicely formatted string for a date
- */
-export function formatDate(
- /** To be passed through Date->Intl.DateTimeFormat */ timestamp: ConstructorParameters<
- typeof Date
- >[0]
-): string {
- const date = new Date(timestamp);
- if (isFinite(date.getTime())) {
- return formatter.format(date);
- } else {
- return invalidDateText;
- }
-}
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { EuiBreadcrumbs, EuiCode, EuiBetaBadge } from '@elastic/eui';
+import styled from 'styled-components';
+import React, { memo } from 'react';
+import { useResolverTheme } from '../assets';
+
+/**
+ * A bold version of EuiCode to display certain titles with
+ */
+export const BoldCode = styled(EuiCode)`
+ &.euiCodeBlock code.euiCodeBlock__code {
+ font-weight: 900;
+ }
+`;
+
+const BetaHeader = styled(`header`)`
+ margin-bottom: 1em;
+`;
+
+/**
+ * The two query parameters we read/write on to control which view the table presents:
+ */
+export interface CrumbInfo {
+ readonly crumbId: string;
+ readonly crumbEvent: string;
+}
+
+const ThemedBreadcrumbs = styled(EuiBreadcrumbs)<{ background: string; text: string }>`
+ &.euiBreadcrumbs.euiBreadcrumbs--responsive {
+ background-color: ${(props) => props.background};
+ color: ${(props) => props.text};
+ padding: 1em;
+ border-radius: 5px;
+ }
+
+ & .euiBreadcrumbSeparator {
+ background: ${(props) => props.text};
+ }
+`;
+
+const betaBadgeLabel = i18n.translate(
+ 'xpack.securitySolution.enpdoint.resolver.panelutils.betaBadgeLabel',
+ {
+ defaultMessage: 'BETA',
+ }
+);
+
+/**
+ * A component to keep time representations in blocks so they don't wrap
+ * and look bad.
+ */
+export const StyledTime = memo(styled('time')`
+ display: inline-block;
+ text-align: start;
+`);
+
+type Breadcrumbs = Parameters[0]['breadcrumbs'];
+/**
+ * Breadcrumb menu with adjustments per direction from UX team
+ */
+export const StyledBreadcrumbs = memo(function StyledBreadcrumbs({
+ breadcrumbs,
+}: {
+ breadcrumbs: Breadcrumbs;
+}) {
+ const {
+ colorMap: { resolverBreadcrumbBackground, resolverEdgeText },
+ } = useResolverTheme();
+ return (
+ <>
+
+
+
+
+ >
+ );
+});
+
+/**
+ * Long formatter (to second) for DateTime
+ */
+export const formatter = new Intl.DateTimeFormat(i18n.getLocale(), {
+ year: 'numeric',
+ month: '2-digit',
+ day: '2-digit',
+ hour: '2-digit',
+ minute: '2-digit',
+ second: '2-digit',
+});
+
+const invalidDateText = i18n.translate(
+ 'xpack.securitySolution.enpdoint.resolver.panelutils.invaliddate',
+ {
+ defaultMessage: 'Invalid Date',
+ }
+);
+/**
+ * @returns {string} A nicely formatted string for a date
+ */
+export function formatDate(
+ /** To be passed through Date->Intl.DateTimeFormat */ timestamp: ConstructorParameters<
+ typeof Date
+ >[0]
+): string {
+ const date = new Date(timestamp);
+ if (isFinite(date.getTime())) {
+ return formatter.format(date);
+ } else {
+ return invalidDateText;
+ }
+}
diff --git a/x-pack/plugins/security_solution/public/resolver/view/styles.tsx b/x-pack/plugins/security_solution/public/resolver/view/styles.tsx
index 2a1e67f4a9fdc..4cdb29b283f1e 100644
--- a/x-pack/plugins/security_solution/public/resolver/view/styles.tsx
+++ b/x-pack/plugins/security_solution/public/resolver/view/styles.tsx
@@ -48,6 +48,8 @@ export const StyledPanel = styled(Panel)`
overflow: auto;
width: 25em;
max-width: 50%;
+ border-radius: 0;
+ border-top: none;
`;
/**
diff --git a/x-pack/plugins/security_solution/public/timelines/components/timeline/__snapshots__/timeline.test.tsx.snap b/x-pack/plugins/security_solution/public/timelines/components/timeline/__snapshots__/timeline.test.tsx.snap
index e38f6ad022d78..3508e12cb1be1 100644
--- a/x-pack/plugins/security_solution/public/timelines/components/timeline/__snapshots__/timeline.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/timelines/components/timeline/__snapshots__/timeline.test.tsx.snap
@@ -474,9 +474,9 @@ exports[`Timeline rendering renders correctly against snapshot 1`] = `
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/column_headers/__snapshots__/index.test.tsx.snap b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/column_headers/__snapshots__/index.test.tsx.snap
index 2436e71a89b86..a5610cabc1774 100644
--- a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/column_headers/__snapshots__/index.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/column_headers/__snapshots__/index.test.tsx.snap
@@ -379,9 +379,9 @@ exports[`ColumnHeaders rendering renders correctly against snapshot 1`] = `
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/suricata/__snapshots__/suricata_row_renderer.test.tsx.snap b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/suricata/__snapshots__/suricata_row_renderer.test.tsx.snap
index cba4b9aa72a25..8672b542eb6c6 100644
--- a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/suricata/__snapshots__/suricata_row_renderer.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/suricata/__snapshots__/suricata_row_renderer.test.tsx.snap
@@ -371,9 +371,9 @@ exports[`suricata_row_renderer renders correctly against snapshot 1`] = `
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_details.test.tsx.snap b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_details.test.tsx.snap
index e1000637147a8..d13c3de00c780 100644
--- a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_details.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_details.test.tsx.snap
@@ -369,9 +369,9 @@ exports[`ZeekDetails rendering it renders the default ZeekDetails 1`] = `
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_row_renderer.test.tsx.snap b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_row_renderer.test.tsx.snap
index d4c80441e6037..b8f28026dfdb5 100644
--- a/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_row_renderer.test.tsx.snap
+++ b/x-pack/plugins/security_solution/public/timelines/components/timeline/body/renderers/zeek/__snapshots__/zeek_row_renderer.test.tsx.snap
@@ -371,9 +371,9 @@ exports[`zeek_row_renderer renders correctly against snapshot 1`] = `
"auditbeat-*",
"endgame-*",
"filebeat-*",
+ "logs-*",
"packetbeat-*",
"winlogbeat-*",
- "logs-*",
],
"name": "event.end",
"searchable": true,
diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts
index 71d14eb1226d5..77a5e85b14199 100644
--- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts
+++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts
@@ -7,13 +7,13 @@ import { Logger } from 'src/core/server';
export const ArtifactConstants = {
GLOBAL_ALLOWLIST_NAME: 'endpoint-exceptionlist',
- SAVED_OBJECT_TYPE: 'endpoint:user-artifact:v2',
+ SAVED_OBJECT_TYPE: 'endpoint:user-artifact',
SUPPORTED_OPERATING_SYSTEMS: ['linux', 'macos', 'windows'],
SCHEMA_VERSION: 'v1',
};
export const ManifestConstants = {
- SAVED_OBJECT_TYPE: 'endpoint:user-artifact-manifest:v2',
+ SAVED_OBJECT_TYPE: 'endpoint:user-artifact-manifest',
SCHEMA_VERSION: 'v1',
INITIAL_VERSION: 'WzAsMF0=',
};
diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/saved_object_mappings.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/saved_object_mappings.ts
index 89e974a3d5fd3..0fb433df95de3 100644
--- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/saved_object_mappings.ts
+++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/saved_object_mappings.ts
@@ -45,7 +45,6 @@ export const exceptionsArtifactSavedObjectMappings: SavedObjectsType['mappings']
},
body: {
type: 'binary',
- index: false,
},
},
};
@@ -66,14 +65,14 @@ export const manifestSavedObjectMappings: SavedObjectsType['mappings'] = {
export const exceptionsArtifactType: SavedObjectsType = {
name: exceptionsArtifactSavedObjectType,
- hidden: false, // TODO: should these be hidden?
+ hidden: false,
namespaceType: 'agnostic',
mappings: exceptionsArtifactSavedObjectMappings,
};
export const manifestType: SavedObjectsType = {
name: manifestSavedObjectType,
- hidden: false, // TODO: should these be hidden?
+ hidden: false,
namespaceType: 'agnostic',
mappings: manifestSavedObjectMappings,
};
diff --git a/x-pack/plugins/snapshot_restore/public/application/components/repository_form/type_settings/readonly_settings.tsx b/x-pack/plugins/snapshot_restore/public/application/components/repository_form/type_settings/readonly_settings.tsx
index 309dad366bef8..17cce6efafb6f 100644
--- a/x-pack/plugins/snapshot_restore/public/application/components/repository_form/type_settings/readonly_settings.tsx
+++ b/x-pack/plugins/snapshot_restore/public/application/components/repository_form/type_settings/readonly_settings.tsx
@@ -46,7 +46,7 @@ export const ReadonlySettings: React.FunctionComponent = ({
case 'ftp':
return (
repositories.url.allowed_urls,
diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json
index 92285d8bf72f8..c1f36372ec94e 100644
--- a/x-pack/plugins/translations/translations/ja-JP.json
+++ b/x-pack/plugins/translations/translations/ja-JP.json
@@ -641,7 +641,6 @@
"data.filter.filterEditor.cancelButtonLabel": "キャンセル",
"data.filter.filterEditor.createCustomLabelInputLabel": "カスタムラベル",
"data.filter.filterEditor.createCustomLabelSwitchLabel": "カスタムラベルを作成しますか?",
- "data.filter.filterEditor.dateFormatHelpLinkLabel": "対応データフォーマット",
"data.filter.filterEditor.doesNotExistOperatorOptionLabel": "存在しません",
"data.filter.filterEditor.editFilterPopupTitle": "フィルターを編集",
"data.filter.filterEditor.editFilterValuesButtonLabel": "フィルター値を編集",
@@ -7472,7 +7471,6 @@
"xpack.infra.logs.analysis.anomaliesSectionLineSeriesName": "15 分ごとのログエントリー (平均)",
"xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel": "異常を読み込み中",
"xpack.infra.logs.analysis.anomaliesSectionTitle": "異常",
- "xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle": "異常が検出されませんでした。",
"xpack.infra.logs.analysis.anomalySectionNoDataBody": "時間範囲を調整する必要があるかもしれません。",
"xpack.infra.logs.analysis.anomalySectionNoDataTitle": "表示するデータがありません。",
"xpack.infra.logs.analysis.jobConfigurationOutdatedCalloutMessage": "異なるソース構成を使用して ML ジョブが作成されました。現在の構成を適用するにはジョブを再作成してください。これにより以前検出された異常が削除されます。",
@@ -7481,14 +7479,6 @@
"xpack.infra.logs.analysis.jobDefinitionOutdatedCalloutTitle": "古い ML ジョブ定義",
"xpack.infra.logs.analysis.jobStoppedCalloutMessage": "ML ジョブが手動またはリソース不足により停止しました。新しいログエントリーはジョブが再起動するまで処理されません。",
"xpack.infra.logs.analysis.jobStoppedCalloutTitle": "ML ジョブが停止しました",
- "xpack.infra.logs.analysis.logRateResultsToolbarText": "{startTime} から {endTime} までの {numberOfLogs} 件のログエントリーを分析しました",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanLabel": "バケットスパン: ",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanValue": "15 分",
- "xpack.infra.logs.analysis.logRateSectionLineSeriesName": "15 分ごとのログエントリー (平均)",
- "xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel": "ログレートの結果を読み込み中",
- "xpack.infra.logs.analysis.logRateSectionNoDataBody": "時間範囲を調整する必要があるかもしれません。",
- "xpack.infra.logs.analysis.logRateSectionNoDataTitle": "表示するデータがありません。",
- "xpack.infra.logs.analysis.logRateSectionTitle": "ログレート",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesBody": "本機能は機械学習ジョブを利用し、そのステータスと結果にアクセスするためには、少なくとも{machineLearningUserRole}ロールが必要です。",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesTitle": "追加の機械学習の権限が必要です",
"xpack.infra.logs.analysis.missingMlSetupPrivilegesBody": "本機能は機械学習ジョブを利用し、設定には{machineLearningAdminRole}ロールが必要です。",
@@ -8042,7 +8032,6 @@
"xpack.ingestManager.agentDetails.viewAgentListTitle": "すべてのエージェント構成を表示",
"xpack.ingestManager.agentEnrollment.cancelButtonLabel": "キャンセル",
"xpack.ingestManager.agentEnrollment.continueButtonLabel": "続行",
- "xpack.ingestManager.agentEnrollment.downloadDescription": "ホストのコンピューターでElasticエージェントをダウンロードします。エージェントバイナリをダウンロードできます。検証署名はElasticの{downloadLink}にあります。",
"xpack.ingestManager.agentEnrollment.downloadLink": "ダウンロードページ",
"xpack.ingestManager.agentEnrollment.fleetNotInitializedText": "エージェントを登録する前に、フリートを設定する必要があります。{link}",
"xpack.ingestManager.agentEnrollment.flyoutTitle": "新しいエージェントを登録",
@@ -8173,7 +8162,6 @@
"xpack.ingestManager.createPackageConfig.agentConfigurationNameLabel": "構成",
"xpack.ingestManager.createPackageConfig.cancelButton": "キャンセル",
"xpack.ingestManager.createPackageConfig.cancelLinkText": "キャンセル",
- "xpack.ingestManager.createPackageConfig.packageNameLabel": "統合",
"xpack.ingestManager.createPackageConfig.pageDescriptionfromConfig": "次の手順に従い、統合をこのエージェント構成に追加します。",
"xpack.ingestManager.createPackageConfig.pageDescriptionfromPackage": "次の手順に従い、この統合をエージェント構成に追加します。",
"xpack.ingestManager.createPackageConfig.pageTitle": "データソースを追加",
@@ -8184,19 +8172,12 @@
"xpack.ingestManager.createPackageConfig.stepConfigure.packageConfigNameInputLabel": "データソース名",
"xpack.ingestManager.createPackageConfig.stepConfigure.packageConfigNamespaceInputLabel": "名前空間",
"xpack.ingestManager.createPackageConfig.stepConfigure.hideStreamsAriaLabel": "{type} ストリームを隠す",
- "xpack.ingestManager.createPackageConfig.stepConfigure.inputConfigErrorsTooltip": "構成エラーを修正してください",
- "xpack.ingestManager.createPackageConfig.stepConfigure.inputLevelErrorsTooltip": "構成エラーを修正してください",
"xpack.ingestManager.createPackageConfig.stepConfigure.inputSettingsDescription": "次の設定はすべてのストリームに適用されます。",
"xpack.ingestManager.createPackageConfig.stepConfigure.inputSettingsTitle": "設定",
"xpack.ingestManager.createPackageConfig.stepConfigure.inputVarFieldOptionalLabel": "オプション",
"xpack.ingestManager.createPackageConfig.stepConfigure.noConfigOptionsMessage": "構成するものがありません",
"xpack.ingestManager.createPackageConfig.stepConfigure.showStreamsAriaLabel": "{type} ストリームを表示",
- "xpack.ingestManager.createPackageConfig.stepConfigure.streamLevelErrorsTooltip": "構成エラーを修正してください",
- "xpack.ingestManager.createPackageConfig.stepConfigure.streamsEnabledCountText": "{count} / {total, plural, one {# ストリーム} other {# ストリーム}}が有効です",
"xpack.ingestManager.createPackageConfig.stepConfigure.toggleAdvancedOptionsButtonText": "高度なオプション",
- "xpack.ingestManager.createPackageConfig.stepConfigure.validationErrorText": "続行する前に、上記のエラーを修正してください",
- "xpack.ingestManager.createPackageConfig.stepConfigure.validationErrorTitle": "データソース構成にエラーがあります",
- "xpack.ingestManager.createPackageConfig.stepDefinePackageConfigTitle": "データソースを定義",
"xpack.ingestManager.createPackageConfig.stepSelectAgentConfigTitle": "エージェント構成を選択する",
"xpack.ingestManager.createPackageConfig.StepSelectConfig.agentConfigAgentsCountText": "{count, plural, one {# エージェント} other {# エージェント}}",
"xpack.ingestManager.createPackageConfig.StepSelectConfig.errorLoadingAgentConfigsTitle": "エージェント構成の読み込みエラー",
@@ -8268,8 +8249,6 @@
"xpack.ingestManager.editPackageConfig.pageDescription": "次の手順に従い、このデータソースを編集します。",
"xpack.ingestManager.editPackageConfig.pageTitle": "データソースを編集",
"xpack.ingestManager.editPackageConfig.saveButton": "データソースを保存",
- "xpack.ingestManager.editPackageConfig.stepConfigurePackageConfigTitle": "収集するデータを選択",
- "xpack.ingestManager.editPackageConfig.stepDefinePackageConfigTitle": "データソースを定義",
"xpack.ingestManager.editPackageConfig.updatedNotificationMessage": "フリートは'{agentConfigName}'構成で使用されているすべてのエージェントに更新をデプロイします。",
"xpack.ingestManager.editPackageConfig.updatedNotificationTitle": "正常に'{packageConfigName}'を更新しました",
"xpack.ingestManager.enrollemntAPIKeyList.emptyMessage": "登録トークンが見つかりません。",
@@ -15258,7 +15237,6 @@
"xpack.snapshotRestore.repositoryForm.typeReadonly.urlLabel": "パス (必須)",
"xpack.snapshotRestore.repositoryForm.typeReadonly.urlSchemeLabel": "スキーム",
"xpack.snapshotRestore.repositoryForm.typeReadonly.urlTitle": "URL",
- "xpack.snapshotRestore.repositoryForm.typeReadonly.urlWhitelistDescription": "この URL は {settingKey} 設定で登録する必要があります。",
"xpack.snapshotRestore.repositoryForm.typeS3.basePathDescription": "レポジトリデータへのバケットパスです。",
"xpack.snapshotRestore.repositoryForm.typeS3.basePathLabel": "ベースパス",
"xpack.snapshotRestore.repositoryForm.typeS3.basePathTitle": "ベースパス",
diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json
index 457f65e89083d..7e36d5676585c 100644
--- a/x-pack/plugins/translations/translations/zh-CN.json
+++ b/x-pack/plugins/translations/translations/zh-CN.json
@@ -641,7 +641,6 @@
"data.filter.filterEditor.cancelButtonLabel": "取消",
"data.filter.filterEditor.createCustomLabelInputLabel": "定制标签",
"data.filter.filterEditor.createCustomLabelSwitchLabel": "创建定制标签?",
- "data.filter.filterEditor.dateFormatHelpLinkLabel": "已接受日期格式",
"data.filter.filterEditor.doesNotExistOperatorOptionLabel": "不存在",
"data.filter.filterEditor.editFilterPopupTitle": "编辑筛选",
"data.filter.filterEditor.editFilterValuesButtonLabel": "编辑筛选值",
@@ -7477,7 +7476,6 @@
"xpack.infra.logs.analysis.anomaliesSectionLineSeriesName": "每 15 分钟日志条目数(平均值)",
"xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel": "正在加载异常",
"xpack.infra.logs.analysis.anomaliesSectionTitle": "异常",
- "xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle": "未检测到任何异常。",
"xpack.infra.logs.analysis.anomalySectionNoDataBody": "您可能想调整时间范围。",
"xpack.infra.logs.analysis.anomalySectionNoDataTitle": "没有可显示的数据。",
"xpack.infra.logs.analysis.jobConfigurationOutdatedCalloutMessage": "创建 ML 作业时所使用的源配置不同。重新创建作业以应用当前配置。这将移除以前检测到的异常。",
@@ -7486,14 +7484,6 @@
"xpack.infra.logs.analysis.jobDefinitionOutdatedCalloutTitle": "ML 作业定义已过期",
"xpack.infra.logs.analysis.jobStoppedCalloutMessage": "ML 作业已手动停止或由于缺乏资源而停止。作业重新启动后,才会处理新的日志条目。",
"xpack.infra.logs.analysis.jobStoppedCalloutTitle": "ML 作业已停止",
- "xpack.infra.logs.analysis.logRateResultsToolbarText": "从 {startTime} 到 {endTime} 已分析 {numberOfLogs} 个日志条目",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanLabel": "存储桶跨度: ",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanValue": "15 分钟",
- "xpack.infra.logs.analysis.logRateSectionLineSeriesName": "每 15 分钟日志条目数(平均值)",
- "xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel": "正在加载日志速率结果",
- "xpack.infra.logs.analysis.logRateSectionNoDataBody": "您可能想调整时间范围。",
- "xpack.infra.logs.analysis.logRateSectionNoDataTitle": "没有可显示的数据。",
- "xpack.infra.logs.analysis.logRateSectionTitle": "日志速率",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesBody": "此功能使用 Machine Learning 作业,要访问这些作业的状态和结果,至少需要 {machineLearningUserRole} 角色。",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesTitle": "需要额外的 Machine Learning 权限",
"xpack.infra.logs.analysis.missingMlSetupPrivilegesBody": "此功能使用 Machine Learning 作业,这需要 {machineLearningAdminRole} 角色才能设置。",
@@ -8047,7 +8037,6 @@
"xpack.ingestManager.agentDetails.viewAgentListTitle": "查看所有代理配置",
"xpack.ingestManager.agentEnrollment.cancelButtonLabel": "取消",
"xpack.ingestManager.agentEnrollment.continueButtonLabel": "继续",
- "xpack.ingestManager.agentEnrollment.downloadDescription": "在主机计算机上下载 Elastic 代理。可以从 Elastic 的{downloadLink}下载代理二进制文件及其验证签名。",
"xpack.ingestManager.agentEnrollment.downloadLink": "下载页面",
"xpack.ingestManager.agentEnrollment.fleetNotInitializedText": "注册代理前需要设置 Fleet。{link}",
"xpack.ingestManager.agentEnrollment.flyoutTitle": "注册新代理",
@@ -8178,7 +8167,6 @@
"xpack.ingestManager.createPackageConfig.agentConfigurationNameLabel": "配置",
"xpack.ingestManager.createPackageConfig.cancelButton": "取消",
"xpack.ingestManager.createPackageConfig.cancelLinkText": "取消",
- "xpack.ingestManager.createPackageConfig.packageNameLabel": "集成",
"xpack.ingestManager.createPackageConfig.pageDescriptionfromConfig": "按照下面的说明将集成添加此代理配置。",
"xpack.ingestManager.createPackageConfig.pageDescriptionfromPackage": "按照下面的说明将此集成添加代理配置。",
"xpack.ingestManager.createPackageConfig.pageTitle": "添加数据源",
@@ -8189,19 +8177,12 @@
"xpack.ingestManager.createPackageConfig.stepConfigure.packageConfigNameInputLabel": "数据源名称",
"xpack.ingestManager.createPackageConfig.stepConfigure.packageConfigNamespaceInputLabel": "命名空间",
"xpack.ingestManager.createPackageConfig.stepConfigure.hideStreamsAriaLabel": "隐藏 {type} 流",
- "xpack.ingestManager.createPackageConfig.stepConfigure.inputConfigErrorsTooltip": "解决配置错误",
- "xpack.ingestManager.createPackageConfig.stepConfigure.inputLevelErrorsTooltip": "解决配置错误",
"xpack.ingestManager.createPackageConfig.stepConfigure.inputSettingsDescription": "以下设置适用于所有流。",
"xpack.ingestManager.createPackageConfig.stepConfigure.inputSettingsTitle": "设置",
"xpack.ingestManager.createPackageConfig.stepConfigure.inputVarFieldOptionalLabel": "可选",
"xpack.ingestManager.createPackageConfig.stepConfigure.noConfigOptionsMessage": "没有可配置的内容",
"xpack.ingestManager.createPackageConfig.stepConfigure.showStreamsAriaLabel": "显示 {type} 流",
- "xpack.ingestManager.createPackageConfig.stepConfigure.streamLevelErrorsTooltip": "解决配置错误",
- "xpack.ingestManager.createPackageConfig.stepConfigure.streamsEnabledCountText": "{count} / {total, plural, one {# 个流} other {# 个流}}已启用",
"xpack.ingestManager.createPackageConfig.stepConfigure.toggleAdvancedOptionsButtonText": "高级选项",
- "xpack.ingestManager.createPackageConfig.stepConfigure.validationErrorText": "在继续之前请解决上述错误",
- "xpack.ingestManager.createPackageConfig.stepConfigure.validationErrorTitle": "您的数据源配置有错误",
- "xpack.ingestManager.createPackageConfig.stepDefinePackageConfigTitle": "定义您的数据源",
"xpack.ingestManager.createPackageConfig.stepSelectAgentConfigTitle": "选择代理配置",
"xpack.ingestManager.createPackageConfig.StepSelectConfig.agentConfigAgentsCountText": "{count, plural, one {# 个代理} other {# 个代理}}",
"xpack.ingestManager.createPackageConfig.StepSelectConfig.errorLoadingAgentConfigsTitle": "加载代理配置时出错",
@@ -8273,8 +8254,6 @@
"xpack.ingestManager.editPackageConfig.pageDescription": "按照下面的说明编辑此数据源。",
"xpack.ingestManager.editPackageConfig.pageTitle": "编辑数据源",
"xpack.ingestManager.editPackageConfig.saveButton": "保存数据源",
- "xpack.ingestManager.editPackageConfig.stepConfigurePackageConfigTitle": "选择要收集的数据",
- "xpack.ingestManager.editPackageConfig.stepDefinePackageConfigTitle": "定义您的数据源",
"xpack.ingestManager.editPackageConfig.updatedNotificationMessage": "Fleet 会将更新部署到所有使用配置“{agentConfigName}”的代理",
"xpack.ingestManager.editPackageConfig.updatedNotificationTitle": "已成功更新“{packageConfigName}”",
"xpack.ingestManager.enrollemntAPIKeyList.emptyMessage": "未找到任何注册令牌。",
@@ -15264,7 +15243,6 @@
"xpack.snapshotRestore.repositoryForm.typeReadonly.urlLabel": "路径(必填)",
"xpack.snapshotRestore.repositoryForm.typeReadonly.urlSchemeLabel": "方案",
"xpack.snapshotRestore.repositoryForm.typeReadonly.urlTitle": "URL",
- "xpack.snapshotRestore.repositoryForm.typeReadonly.urlWhitelistDescription": "必须在 {settingKey} 设置中注册此 URL。",
"xpack.snapshotRestore.repositoryForm.typeS3.basePathDescription": "存储库数据的存储桶路径。",
"xpack.snapshotRestore.repositoryForm.typeS3.basePathLabel": "基路径",
"xpack.snapshotRestore.repositoryForm.typeS3.basePathTitle": "基路径",
diff --git a/x-pack/test/api_integration/apis/management/index_management/component_templates.ts b/x-pack/test/api_integration/apis/management/index_management/component_templates.ts
index 1a00eaba35aa1..30ec95f208c80 100644
--- a/x-pack/test/api_integration/apis/management/index_management/component_templates.ts
+++ b/x-pack/test/api_integration/apis/management/index_management/component_templates.ts
@@ -78,6 +78,7 @@ export default function ({ getService }: FtrProviderContext) {
expect(testComponentTemplate).to.eql({
name: COMPONENT_NAME,
usedBy: [],
+ isManaged: false,
hasSettings: true,
hasMappings: true,
hasAliases: false,
@@ -96,6 +97,7 @@ export default function ({ getService }: FtrProviderContext) {
...COMPONENT,
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
});
});
@@ -148,6 +150,7 @@ export default function ({ getService }: FtrProviderContext) {
},
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
})
.expect(200);
@@ -167,6 +170,7 @@ export default function ({ getService }: FtrProviderContext) {
template: {},
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
})
.expect(200);
@@ -185,6 +189,7 @@ export default function ({ getService }: FtrProviderContext) {
template: {},
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
})
.expect(409);
@@ -246,6 +251,7 @@ export default function ({ getService }: FtrProviderContext) {
version: 1,
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
})
.expect(200);
@@ -267,6 +273,7 @@ export default function ({ getService }: FtrProviderContext) {
version: 1,
_kbnMeta: {
usedBy: [],
+ isManaged: false,
},
})
.expect(404);
diff --git a/x-pack/test/api_integration/apis/metrics_ui/log_entry_highlights.ts b/x-pack/test/api_integration/apis/metrics_ui/log_entry_highlights.ts
index 823c8159a136d..4e6da9d50dc2a 100644
--- a/x-pack/test/api_integration/apis/metrics_ui/log_entry_highlights.ts
+++ b/x-pack/test/api_integration/apis/metrics_ui/log_entry_highlights.ts
@@ -122,9 +122,7 @@ export default function ({ getService }: FtrProviderContext) {
});
});
- // Skipped since it behaves differently in master and in the 7.X branch
- // See https://github.com/elastic/kibana/issues/49959
- it.skip('highlights field columns', async () => {
+ it('highlights field columns', async () => {
const { body } = await supertest
.post(LOG_ENTRIES_HIGHLIGHTS_PATH)
.set(COMMON_HEADERS)
diff --git a/x-pack/test/api_integration/apis/ml/modules/get_module.ts b/x-pack/test/api_integration/apis/ml/modules/get_module.ts
index 5ca496a7a7fe9..cfb3c17ac7f21 100644
--- a/x-pack/test/api_integration/apis/ml/modules/get_module.ts
+++ b/x-pack/test/api_integration/apis/ml/modules/get_module.ts
@@ -25,6 +25,7 @@ const moduleIds = [
'sample_data_weblogs',
'siem_auditbeat',
'siem_auditbeat_auth',
+ 'siem_cloudtrail',
'siem_packetbeat',
'siem_winlogbeat',
'siem_winlogbeat_auth',
diff --git a/x-pack/test/functional/apps/infra/home_page.ts b/x-pack/test/functional/apps/infra/home_page.ts
index 28279d5e5b812..04f289b69bb71 100644
--- a/x-pack/test/functional/apps/infra/home_page.ts
+++ b/x-pack/test/functional/apps/infra/home_page.ts
@@ -4,15 +4,22 @@
* you may not use this file except in compliance with the Elastic License.
*/
+import moment from 'moment';
+import expect from '@kbn/expect/expect.js';
import { FtrProviderContext } from '../../ftr_provider_context';
import { DATES } from './constants';
const DATE_WITH_DATA = DATES.metricsAndLogs.hosts.withData;
const DATE_WITHOUT_DATA = DATES.metricsAndLogs.hosts.withoutData;
+const COMMON_REQUEST_HEADERS = {
+ 'kbn-xsrf': 'some-xsrf-token',
+};
+
export default ({ getPageObjects, getService }: FtrProviderContext) => {
const esArchiver = getService('esArchiver');
const pageObjects = getPageObjects(['common', 'infraHome']);
+ const supertest = getService('supertest');
describe('Home page', function () {
this.tags('includeFirefox');
@@ -46,6 +53,53 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
await pageObjects.infraHome.goToTime(DATE_WITHOUT_DATA);
await pageObjects.infraHome.getNoMetricsDataPrompt();
});
+
+ it('records telemetry for hosts', async () => {
+ await pageObjects.infraHome.goToTime(DATE_WITH_DATA);
+ await pageObjects.infraHome.getWaffleMap();
+
+ const resp = await supertest
+ .post(`/api/telemetry/v2/clusters/_stats`)
+ .set(COMMON_REQUEST_HEADERS)
+ .set('Accept', 'application/json')
+ .send({
+ timeRange: {
+ min: moment().subtract(1, 'hour').toISOString(),
+ max: moment().toISOString(),
+ },
+ unencrypted: true,
+ })
+ .expect(200)
+ .then((res: any) => res.body);
+
+ expect(
+ resp[0].stack_stats.kibana.plugins.infraops.last_24_hours.hits.infraops_hosts
+ ).to.be.greaterThan(0);
+ });
+
+ it('records telemetry for docker', async () => {
+ await pageObjects.infraHome.goToTime(DATE_WITH_DATA);
+ await pageObjects.infraHome.getWaffleMap();
+ await pageObjects.infraHome.goToDocker();
+
+ const resp = await supertest
+ .post(`/api/telemetry/v2/clusters/_stats`)
+ .set(COMMON_REQUEST_HEADERS)
+ .set('Accept', 'application/json')
+ .send({
+ timeRange: {
+ min: moment().subtract(1, 'hour').toISOString(),
+ max: moment().toISOString(),
+ },
+ unencrypted: true,
+ })
+ .expect(200)
+ .then((res: any) => res.body);
+
+ expect(
+ resp[0].stack_stats.kibana.plugins.infraops.last_24_hours.hits.infraops_docker
+ ).to.be.greaterThan(0);
+ });
});
});
};
diff --git a/x-pack/test/functional/apps/infra/logs_source_configuration.ts b/x-pack/test/functional/apps/infra/logs_source_configuration.ts
index 7ec06e74289c9..04ffcc4847d54 100644
--- a/x-pack/test/functional/apps/infra/logs_source_configuration.ts
+++ b/x-pack/test/functional/apps/infra/logs_source_configuration.ts
@@ -5,16 +5,22 @@
*/
import expect from '@kbn/expect';
+import moment from 'moment';
import { DATES } from './constants';
import { FtrProviderContext } from '../../ftr_provider_context';
+const COMMON_REQUEST_HEADERS = {
+ 'kbn-xsrf': 'some-xsrf-token',
+};
+
export default ({ getPageObjects, getService }: FtrProviderContext) => {
const esArchiver = getService('esArchiver');
const logsUi = getService('logsUi');
const infraSourceConfigurationForm = getService('infraSourceConfigurationForm');
const pageObjects = getPageObjects(['common', 'infraLogs']);
const retry = getService('retry');
+ const supertest = getService('supertest');
describe('Logs Source Configuration', function () {
before(async () => {
@@ -97,6 +103,35 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
expect(logStreamEntryColumns).to.have.length(3);
});
+ it('records telemetry for logs', async () => {
+ await logsUi.logStreamPage.navigateTo({
+ logPosition: {
+ start: DATES.metricsAndLogs.stream.startWithData,
+ end: DATES.metricsAndLogs.stream.endWithData,
+ },
+ });
+
+ await logsUi.logStreamPage.getStreamEntries();
+
+ const resp = await supertest
+ .post(`/api/telemetry/v2/clusters/_stats`)
+ .set(COMMON_REQUEST_HEADERS)
+ .set('Accept', 'application/json')
+ .send({
+ timeRange: {
+ min: moment().subtract(1, 'hour').toISOString(),
+ max: moment().toISOString(),
+ },
+ unencrypted: true,
+ })
+ .expect(200)
+ .then((res: any) => res.body);
+
+ expect(
+ resp[0].stack_stats.kibana.plugins.infraops.last_24_hours.hits.logs
+ ).to.be.greaterThan(0);
+ });
+
it('can change the log columns', async () => {
await pageObjects.infraLogs.navigateToTab('settings');
diff --git a/x-pack/test/functional/es_archives/endpoint/artifacts/api_feature/data.json b/x-pack/test/functional/es_archives/endpoint/artifacts/api_feature/data.json
index bd1010240f86c..ab476660e3ffc 100644
--- a/x-pack/test/functional/es_archives/endpoint/artifacts/api_feature/data.json
+++ b/x-pack/test/functional/es_archives/endpoint/artifacts/api_feature/data.json
@@ -1,12 +1,12 @@
{
"type": "doc",
"value": {
- "id": "endpoint:user-artifact:v2:endpoint-exceptionlist-linux-v1-d2a9c760005b08d43394e59a8701ae75c80881934ccf15a006944452b80f7f9f",
+ "id": "endpoint:user-artifact:endpoint-exceptionlist-linux-v1-d2a9c760005b08d43394e59a8701ae75c80881934ccf15a006944452b80f7f9f",
"index": ".kibana",
"source": {
"references": [
],
- "endpoint:user-artifact:v2": {
+ "endpoint:user-artifact": {
"body": "eJylkM8KwjAMxl9Fci59gN29iicvMqR02QjUbiSpKGPvbiw6ETwpuX1/fh9kBszKhALNcQa9TQgNCJ2nhOA+vJ4wdWaGqJSHPY8RRXxPCb3QkJEtP07IQUe2GOWYSoedqU8qXq16ikGqeAmpPNRtCqIU3WbnDx4WN38d/WvhQqmCXzDlIlojP9CsjLC0bqWtHwhaGN/1jHVkae3u+6N6Sg==",
"created": 1593016187465,
"compressionAlgorithm": "zlib",
@@ -17,7 +17,7 @@
"decodedSha256": "d2a9c760005b08d43394e59a8701ae75c80881934ccf15a006944452b80f7f9f",
"decodedSize": 358
},
- "type": "endpoint:user-artifact:v2",
+ "type": "endpoint:user-artifact",
"updated_at": "2020-06-24T16:29:47.584Z"
}
}
@@ -26,12 +26,12 @@
{
"type": "doc",
"value": {
- "id": "endpoint:user-artifact-manifest:v2:endpoint-manifest-v1",
+ "id": "endpoint:user-artifact-manifest:endpoint-manifest-v1",
"index": ".kibana",
"source": {
"references": [
],
- "endpoint:user-artifact-manifest:v2": {
+ "endpoint:user-artifact-manifest": {
"created": 1593183699663,
"ids": [
"endpoint-exceptionlist-linux-v1-d2a9c760005b08d43394e59a8701ae75c80881934ccf15a006944452b80f7f9f",
@@ -39,7 +39,7 @@
"endpoint-exceptionlist-windows-v1-d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658"
]
},
- "type": "endpoint:user-artifact-manifest:v2",
+ "type": "endpoint:user-artifact-manifest",
"updated_at": "2020-06-26T15:01:39.704Z"
}
}
diff --git a/x-pack/test/functional/page_objects/infra_home_page.ts b/x-pack/test/functional/page_objects/infra_home_page.ts
index 51dad594f21f5..ef6d2dc02eb80 100644
--- a/x-pack/test/functional/page_objects/infra_home_page.ts
+++ b/x-pack/test/functional/page_objects/infra_home_page.ts
@@ -33,6 +33,29 @@ export function InfraHomePageProvider({ getService }: FtrProviderContext) {
return await testSubjects.find('waffleMap');
},
+ async openInvenotrySwitcher() {
+ await testSubjects.click('openInventorySwitcher');
+ return await testSubjects.find('goToHost');
+ },
+
+ async goToHost() {
+ await testSubjects.click('openInventorySwitcher');
+ await testSubjects.find('goToHost');
+ return await testSubjects.click('goToHost');
+ },
+
+ async goToPods() {
+ await testSubjects.click('openInventorySwitcher');
+ await testSubjects.find('goToHost');
+ return await testSubjects.click('goToPods');
+ },
+
+ async goToDocker() {
+ await testSubjects.click('openInventorySwitcher');
+ await testSubjects.find('goToHost');
+ return await testSubjects.click('goToDocker');
+ },
+
async goToMetricExplorer() {
return await testSubjects.click('infrastructureNavLink_/infrastructure/metrics-explorer');
},
diff --git a/x-pack/test/functional_embedded/tests/iframe_embedded.ts b/x-pack/test/functional_embedded/tests/iframe_embedded.ts
index 9b5c9894a9407..f05d70b6cb3e8 100644
--- a/x-pack/test/functional_embedded/tests/iframe_embedded.ts
+++ b/x-pack/test/functional_embedded/tests/iframe_embedded.ts
@@ -14,7 +14,8 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const config = getService('config');
const testSubjects = getService('testSubjects');
- describe('in iframe', () => {
+ // Flaky: https://github.com/elastic/kibana/issues/70928
+ describe.skip('in iframe', () => {
it('should open Kibana for logged-in user', async () => {
const isChromeHiddenBefore = await PageObjects.common.isChromeHidden();
expect(isChromeHiddenBefore).to.be(true);
diff --git a/x-pack/test/ingest_manager_api_integration/apis/index.js b/x-pack/test/ingest_manager_api_integration/apis/index.js
index 30c49140c6e2a..81848917f9b05 100644
--- a/x-pack/test/ingest_manager_api_integration/apis/index.js
+++ b/x-pack/test/ingest_manager_api_integration/apis/index.js
@@ -17,5 +17,6 @@ export default function ({ loadTestFile }) {
// Package configs
loadTestFile(require.resolve('./package_config/create'));
+ loadTestFile(require.resolve('./package_config/update'));
});
}
diff --git a/x-pack/test/ingest_manager_api_integration/apis/package_config/create.ts b/x-pack/test/ingest_manager_api_integration/apis/package_config/create.ts
index c7748ab255f43..cae4ff79bdef6 100644
--- a/x-pack/test/ingest_manager_api_integration/apis/package_config/create.ts
+++ b/x-pack/test/ingest_manager_api_integration/apis/package_config/create.ts
@@ -126,5 +126,48 @@ export default function ({ getService }: FtrProviderContext) {
warnAndSkipTest(this, log);
}
});
+
+ it('should return a 500 if there is another package config with the same name', async function () {
+ if (server.enabled) {
+ await supertest
+ .post(`/api/ingest_manager/package_configs`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'same-name-test-1',
+ description: '',
+ namespace: 'default',
+ config_id: agentConfigId,
+ enabled: true,
+ output_id: '',
+ inputs: [],
+ package: {
+ name: 'filetest',
+ title: 'For File Tests',
+ version: '0.1.0',
+ },
+ })
+ .expect(200);
+ await supertest
+ .post(`/api/ingest_manager/package_configs`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'same-name-test-1',
+ description: '',
+ namespace: 'default',
+ config_id: agentConfigId,
+ enabled: true,
+ output_id: '',
+ inputs: [],
+ package: {
+ name: 'filetest',
+ title: 'For File Tests',
+ version: '0.1.0',
+ },
+ })
+ .expect(500);
+ } else {
+ warnAndSkipTest(this, log);
+ }
+ });
});
}
diff --git a/x-pack/test/ingest_manager_api_integration/apis/package_config/update.ts b/x-pack/test/ingest_manager_api_integration/apis/package_config/update.ts
new file mode 100644
index 0000000000000..0251fef5f767c
--- /dev/null
+++ b/x-pack/test/ingest_manager_api_integration/apis/package_config/update.ts
@@ -0,0 +1,127 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import expect from '@kbn/expect';
+import { FtrProviderContext } from '../../../api_integration/ftr_provider_context';
+import { warnAndSkipTest } from '../../helpers';
+
+export default function ({ getService }: FtrProviderContext) {
+ const log = getService('log');
+ const supertest = getService('supertest');
+ const dockerServers = getService('dockerServers');
+
+ const server = dockerServers.get('registry');
+ // use function () {} and not () => {} here
+ // because `this` has to point to the Mocha context
+ // see https://mochajs.org/#arrow-functions
+
+ describe('Package Config - update', async function () {
+ let agentConfigId: string;
+ let packageConfigId: string;
+ let packageConfigId2: string;
+
+ before(async function () {
+ const { body: agentConfigResponse } = await supertest
+ .post(`/api/ingest_manager/agent_configs`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'Test config',
+ namespace: 'default',
+ });
+ agentConfigId = agentConfigResponse.item.id;
+
+ const { body: packageConfigResponse } = await supertest
+ .post(`/api/ingest_manager/package_configs`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'filetest-1',
+ description: '',
+ namespace: 'default',
+ config_id: agentConfigId,
+ enabled: true,
+ output_id: '',
+ inputs: [],
+ package: {
+ name: 'filetest',
+ title: 'For File Tests',
+ version: '0.1.0',
+ },
+ });
+ packageConfigId = packageConfigResponse.item.id;
+
+ const { body: packageConfigResponse2 } = await supertest
+ .post(`/api/ingest_manager/package_configs`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'filetest-2',
+ description: '',
+ namespace: 'default',
+ config_id: agentConfigId,
+ enabled: true,
+ output_id: '',
+ inputs: [],
+ package: {
+ name: 'filetest',
+ title: 'For File Tests',
+ version: '0.1.0',
+ },
+ });
+ packageConfigId2 = packageConfigResponse2.item.id;
+ });
+
+ it('should work with valid values', async function () {
+ if (server.enabled) {
+ const { body: apiResponse } = await supertest
+ .put(`/api/ingest_manager/package_configs/${packageConfigId}`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'filetest-1',
+ description: '',
+ namespace: 'updated_namespace',
+ config_id: agentConfigId,
+ enabled: true,
+ output_id: '',
+ inputs: [],
+ package: {
+ name: 'filetest',
+ title: 'For File Tests',
+ version: '0.1.0',
+ },
+ })
+ .expect(200);
+
+ expect(apiResponse.success).to.be(true);
+ } else {
+ warnAndSkipTest(this, log);
+ }
+ });
+
+ it('should return a 500 if there is another package config with the same name', async function () {
+ if (server.enabled) {
+ await supertest
+ .put(`/api/ingest_manager/package_configs/${packageConfigId2}`)
+ .set('kbn-xsrf', 'xxxx')
+ .send({
+ name: 'filetest-1',
+ description: '',
+ namespace: 'updated_namespace',
+ config_id: agentConfigId,
+ enabled: true,
+ output_id: '',
+ inputs: [],
+ package: {
+ name: 'filetest',
+ title: 'For File Tests',
+ version: '0.1.0',
+ },
+ })
+ .expect(500);
+ } else {
+ warnAndSkipTest(this, log);
+ }
+ });
+ });
+}
diff --git a/yarn.lock b/yarn.lock
index 153f4e89fe969..290713d32d333 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -8512,16 +8512,16 @@ backo2@1.0.2:
resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947"
integrity sha1-MasayLEpNjRj41s+u2n038+6eUc=
-backport@5.4.6:
- version "5.4.6"
- resolved "https://registry.yarnpkg.com/backport/-/backport-5.4.6.tgz#8d8d8cb7c0df4079a40c6f4892f393daa92c1ef8"
- integrity sha512-O3fFmQXKZN5sP6R6GwXeobsEgoFzvnuTGj8/TTTjxt1xA07pfhTY67M16rr0eiDDtuSxAqWMX9Zo+5Q3DuxfpQ==
+backport@5.5.1:
+ version "5.5.1"
+ resolved "https://registry.yarnpkg.com/backport/-/backport-5.5.1.tgz#2eeddbdc4cfc0530119bdb2b0c3c30bc7ef574dd"
+ integrity sha512-vQuGrxxMx9H64ywqsIYUHL8+/xvPeP0nnBa0YQt5S+XqW7etaqOoa5dFW0c77ADdqjfLlGUIvtc2i6UrmqeFUQ==
dependencies:
axios "^0.19.2"
dedent "^0.7.0"
del "^5.1.0"
find-up "^4.1.0"
- inquirer "^7.2.0"
+ inquirer "^7.3.1"
lodash.flatmap "^4.5.0"
lodash.isempty "^4.4.0"
lodash.isstring "^4.0.1"
@@ -8531,7 +8531,7 @@ backport@5.4.6:
safe-json-stringify "^1.2.0"
strip-json-comments "^3.1.0"
winston "^3.3.3"
- yargs "^15.3.1"
+ yargs "^15.4.0"
bail@^1.0.0:
version "1.0.2"
@@ -9710,6 +9710,14 @@ chalk@^3.0.0:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
+chalk@^4.1.0:
+ version "4.1.0"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a"
+ integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==
+ dependencies:
+ ansi-styles "^4.1.0"
+ supports-color "^7.1.0"
+
chalk@~0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.4.0.tgz#5199a3ddcd0c1efe23bc08c1b027b06176e0c64f"
@@ -10163,6 +10171,11 @@ cli-width@^2.0.0:
resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639"
integrity sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=
+cli-width@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6"
+ integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==
+
clipboard@^2.0.0:
version "2.0.4"
resolved "https://registry.yarnpkg.com/clipboard/-/clipboard-2.0.4.tgz#836dafd66cf0fea5d71ce5d5b0bf6e958009112d"
@@ -17870,21 +17883,21 @@ inquirer@^7.0.0:
strip-ansi "^5.1.0"
through "^2.3.6"
-inquirer@^7.2.0:
- version "7.2.0"
- resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.2.0.tgz#63ce99d823090de7eb420e4bb05e6f3449aa389a"
- integrity sha512-E0c4rPwr9ByePfNlTIB8z51kK1s2n6jrHuJeEHENl/sbq2G/S1auvibgEwNR4uSyiU+PiYHqSwsgGiXjG8p5ZQ==
+inquirer@^7.3.1:
+ version "7.3.1"
+ resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.1.tgz#ac6aba1abdfdd5ad34e7069370411edba17f6439"
+ integrity sha512-/+vOpHQHhoh90Znev8BXiuw1TDQ7IDxWsQnFafUEoK5+4uN5Eoz1p+3GqOj/NtzEi9VzWKQcV9Bm+i8moxedsA==
dependencies:
ansi-escapes "^4.2.1"
- chalk "^3.0.0"
+ chalk "^4.1.0"
cli-cursor "^3.1.0"
- cli-width "^2.0.0"
+ cli-width "^3.0.0"
external-editor "^3.0.3"
figures "^3.0.0"
- lodash "^4.17.15"
+ lodash "^4.17.16"
mute-stream "0.0.8"
run-async "^2.4.0"
- rxjs "^6.5.3"
+ rxjs "^6.6.0"
string-width "^4.1.0"
strip-ansi "^6.0.0"
through "^2.3.6"
@@ -20903,7 +20916,7 @@ lodash.uniq@^4.5.0:
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
-lodash@4.17.11, lodash@4.17.15, lodash@>4.17.4, lodash@^4, lodash@^4.0.0, lodash@^4.0.1, lodash@^4.10.0, lodash@^4.11.1, lodash@^4.14.0, lodash@^4.15.0, lodash@^4.15.19, lodash@^4.17.0, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.2, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@^4.6.1, lodash@~4.17.10, lodash@~4.17.15, lodash@~4.17.5:
+lodash@4.17.11, lodash@4.17.15, lodash@>4.17.4, lodash@^4, lodash@^4.0.0, lodash@^4.0.1, lodash@^4.10.0, lodash@^4.11.1, lodash@^4.14.0, lodash@^4.15.0, lodash@^4.15.19, lodash@^4.17.0, lodash@^4.17.10, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.16, lodash@^4.17.2, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@^4.6.1, lodash@~4.17.10, lodash@~4.17.15, lodash@~4.17.5:
version "4.17.19"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.19.tgz#e48ddedbe30b3321783c5b4301fbd353bc1e4a4b"
integrity sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==
@@ -26723,11 +26736,6 @@ regjsparser@^0.6.4:
dependencies:
jsesc "~0.5.0"
-regression@2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/regression/-/regression-2.0.1.tgz#8d29c3e8224a10850c35e337e85a8b2fac3b0c87"
- integrity sha1-jSnD6CJKEIUMNeM36FqLL6w7DIc=
-
rehype-parse@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/rehype-parse/-/rehype-parse-6.0.0.tgz#f681555f2598165bee2c778b39f9073d17b16bca"
@@ -27539,7 +27547,7 @@ rxjs-marbles@^5.0.6:
dependencies:
fast-equals "^2.0.0"
-rxjs@6.5.5, rxjs@^6.5.3, rxjs@^6.5.5:
+rxjs@6.5.5, rxjs@^6.5.5:
version "6.5.5"
resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.5.5.tgz#c5c884e3094c8cfee31bf27eb87e54ccfc87f9ec"
integrity sha512-WfQI+1gohdf0Dai/Bbmk5L5ItH5tYqm3ki2c5GdWhKjalzjg93N3avFjVStyZZz+A2Em+ZxKH5bNghw9UeylGQ==
@@ -27560,6 +27568,13 @@ rxjs@^6.1.0, rxjs@^6.3.3, rxjs@^6.4.0, rxjs@^6.5.1:
dependencies:
tslib "^1.9.0"
+rxjs@^6.6.0:
+ version "6.6.0"
+ resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.0.tgz#af2901eedf02e3a83ffa7f886240ff9018bbec84"
+ integrity sha512-3HMA8z/Oz61DUHe+SdOiQyzIf4tOx5oQHmMir7IZEu6TMqCLHT4LRcmNaUS0NwOz8VLvmmBduMsoaUvMaIiqzg==
+ dependencies:
+ tslib "^1.9.0"
+
safe-buffer@5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
@@ -33416,7 +33431,7 @@ yargs-parser@^11.1.1:
camelcase "^5.0.0"
decamelize "^1.2.0"
-yargs-parser@^18.1.1:
+yargs-parser@^18.1.1, yargs-parser@^18.1.2:
version "18.1.3"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
@@ -33529,6 +33544,23 @@ yargs@^15.0.2, yargs@^15.1.0, yargs@^15.3.1, yargs@~15.3.1:
y18n "^4.0.0"
yargs-parser "^18.1.1"
+yargs@^15.4.0:
+ version "15.4.1"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
+ integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==
+ dependencies:
+ cliui "^6.0.0"
+ decamelize "^1.2.0"
+ find-up "^4.1.0"
+ get-caller-file "^2.0.1"
+ require-directory "^2.1.1"
+ require-main-filename "^2.0.0"
+ set-blocking "^2.0.0"
+ string-width "^4.2.0"
+ which-module "^2.0.0"
+ y18n "^4.0.0"
+ yargs-parser "^18.1.2"
+
yargs@^3.15.0:
version "3.32.0"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.32.0.tgz#03088e9ebf9e756b69751611d2a5ef591482c995"