From 4c58018d337330348b858d32cc01834e3e722c43 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Fri, 17 Jul 2020 16:27:25 -0400 Subject: [PATCH 1/9] skip flaky suite (#72339) --- .../cypress/integration/timeline_local_storage.spec.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugins/security_solution/cypress/integration/timeline_local_storage.spec.ts b/x-pack/plugins/security_solution/cypress/integration/timeline_local_storage.spec.ts index 383ebe2220585..7c047459c56cc 100644 --- a/x-pack/plugins/security_solution/cypress/integration/timeline_local_storage.spec.ts +++ b/x-pack/plugins/security_solution/cypress/integration/timeline_local_storage.spec.ts @@ -13,7 +13,8 @@ import { TABLE_COLUMN_EVENTS_MESSAGE } from '../screens/hosts/external_events'; import { waitsForEventsToBeLoaded, openEventsViewerFieldsBrowser } from '../tasks/hosts/events'; import { removeColumn, resetFields } from '../tasks/timeline'; -describe('persistent timeline', () => { +// Failing: See https://github.com/elastic/kibana/issues/72339 +describe.skip('persistent timeline', () => { before(() => { loginAndWaitForPage(HOSTS_URL); openEvents(); From 5356941f22ccfb358f4cffab6a20bfb5a1bc9cff Mon Sep 17 00:00:00 2001 From: Madison Caldwell Date: Fri, 17 Jul 2020 16:51:28 -0400 Subject: [PATCH 2/9] [Security Solution][Endpoint][Exceptions] Only write manifest to policy when there are changes (#72000) * Refactor security_solution policy creation callback - part 1 * Fix manifest dispatch * Change how dispatches are performed * simplify manifest types * Remove unused mock * Fix tests * one place to construct artifact ids * fixing linter exceptions * Add tests for stable hashes * Additional testing and type cleanup * Remove unnecessary log * Minor fixup * jsdoc * type fixup * Additional type adjustments --- x-pack/plugins/ingest_manager/common/mocks.ts | 87 ++++++ .../common/endpoint/schema/common.ts | 5 + .../common/endpoint/schema/manifest.ts | 46 ++- .../endpoint/ingest_integration.test.ts | 105 ++++--- .../server/endpoint/ingest_integration.ts | 156 +++++----- .../server/endpoint/lib/artifacts/common.ts | 16 +- .../endpoint/lib/artifacts/lists.test.ts | 94 +++++- .../server/endpoint/lib/artifacts/lists.ts | 33 +- .../endpoint/lib/artifacts/manifest.test.ts | 158 +++++----- .../server/endpoint/lib/artifacts/manifest.ts | 97 +++++- .../lib/artifacts/manifest_entry.test.ts | 28 +- .../endpoint/lib/artifacts/manifest_entry.ts | 3 +- .../server/endpoint/lib/artifacts/mocks.ts | 68 +++++ .../server/endpoint/lib/artifacts/task.ts | 84 +++-- .../server/endpoint/mocks.ts | 25 +- .../artifacts/download_exception_list.ts | 6 +- .../endpoint/schemas/artifacts/lists.mock.ts | 4 +- .../schemas/artifacts/saved_objects.mock.ts | 54 ++-- .../schemas/artifacts/saved_objects.ts | 41 ++- .../artifacts/artifact_client.test.ts | 9 +- .../services/artifacts/artifact_client.ts | 28 +- .../artifacts/manifest_client.test.ts | 5 +- .../services/artifacts/manifest_client.ts | 9 +- .../manifest_manager/manifest_manager.mock.ts | 81 ++--- .../manifest_manager/manifest_manager.test.ts | 209 +++++++++---- .../manifest_manager/manifest_manager.ts | 286 ++++++++---------- 26 files changed, 1154 insertions(+), 583 deletions(-) create mode 100644 x-pack/plugins/security_solution/server/endpoint/lib/artifacts/mocks.ts diff --git a/x-pack/plugins/ingest_manager/common/mocks.ts b/x-pack/plugins/ingest_manager/common/mocks.ts index e85364f2bb672..236324b11c580 100644 --- a/x-pack/plugins/ingest_manager/common/mocks.ts +++ 
b/x-pack/plugins/ingest_manager/common/mocks.ts @@ -44,3 +44,90 @@ export const createPackageConfigMock = (): PackageConfig => { ], }; }; + +export const createPackageConfigWithInitialManifestMock = (): PackageConfig => { + const packageConfig = createPackageConfigMock(); + packageConfig.inputs[0].config!.artifact_manifest = { + value: { + artifacts: { + 'endpoint-exceptionlist-linux-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + decoded_size: 14, + encoded_size: 22, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + 'endpoint-exceptionlist-macos-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + decoded_size: 14, + encoded_size: 22, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + 'endpoint-exceptionlist-windows-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + decoded_size: 14, + encoded_size: 22, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + }, + manifest_version: 'a9b7ef358a363f327f479e31efc4f228b2277a7fb4d1914ca9b4e7ca9ffcf537', + schema_version: 'v1', + }, + }; + return packageConfig; +}; + +export const createPackageConfigWithManifestMock = (): PackageConfig => { + const packageConfig = createPackageConfigMock(); + packageConfig.inputs[0].config!.artifact_manifest = { + value: { + artifacts: { + 'endpoint-exceptionlist-linux-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: '0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', + encoded_sha256: '57941169bb2c5416f9bd7224776c8462cb9a2be0fe8b87e6213e77a1d29be824', + decoded_size: 292, + encoded_size: 131, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', + }, + 'endpoint-exceptionlist-macos-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + }, + 'endpoint-exceptionlist-windows-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, + relative_url: + 
'/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + }, + }, + manifest_version: '520f6cf88b3f36a065c6ca81058d5f8690aadadf6fe857f8dec4cc37589e7283', + schema_version: 'v1', + }, + }; + + return packageConfig; +}; diff --git a/x-pack/plugins/security_solution/common/endpoint/schema/common.ts b/x-pack/plugins/security_solution/common/endpoint/schema/common.ts index 014673ebe6398..8f2ea1f8a6452 100644 --- a/x-pack/plugins/security_solution/common/endpoint/schema/common.ts +++ b/x-pack/plugins/security_solution/common/endpoint/schema/common.ts @@ -12,6 +12,11 @@ export const compressionAlgorithm = t.keyof({ }); export type CompressionAlgorithm = t.TypeOf; +export const compressionAlgorithmDispatch = t.keyof({ + zlib: null, +}); +export type CompressionAlgorithmDispatch = t.TypeOf; + export const encryptionAlgorithm = t.keyof({ none: null, }); diff --git a/x-pack/plugins/security_solution/common/endpoint/schema/manifest.ts b/x-pack/plugins/security_solution/common/endpoint/schema/manifest.ts index 1c8916dfdd5bb..f8bb8b70f2d5b 100644 --- a/x-pack/plugins/security_solution/common/endpoint/schema/manifest.ts +++ b/x-pack/plugins/security_solution/common/endpoint/schema/manifest.ts @@ -7,6 +7,7 @@ import * as t from 'io-ts'; import { compressionAlgorithm, + compressionAlgorithmDispatch, encryptionAlgorithm, identifier, manifestSchemaVersion, @@ -16,25 +17,60 @@ import { size, } from './common'; -export const manifestEntrySchema = t.exact( +export const manifestEntryBaseSchema = t.exact( t.type({ relative_url: relativeUrl, decoded_sha256: sha256, decoded_size: size, encoded_sha256: sha256, encoded_size: size, - compression_algorithm: compressionAlgorithm, encryption_algorithm: encryptionAlgorithm, }) ); -export const manifestSchema = t.exact( +export const manifestEntrySchema = t.intersection([ + manifestEntryBaseSchema, + t.exact( + t.type({ + compression_algorithm: compressionAlgorithm, + }) + ), +]); +export type ManifestEntrySchema = t.TypeOf; + +export const manifestEntryDispatchSchema = t.intersection([ + manifestEntryBaseSchema, + t.exact( + t.type({ + compression_algorithm: compressionAlgorithmDispatch, + }) + ), +]); +export type ManifestEntryDispatchSchema = t.TypeOf; + +export const manifestBaseSchema = t.exact( t.type({ manifest_version: manifestVersion, schema_version: manifestSchemaVersion, - artifacts: t.record(identifier, manifestEntrySchema), }) ); -export type ManifestEntrySchema = t.TypeOf; +export const manifestSchema = t.intersection([ + manifestBaseSchema, + t.exact( + t.type({ + artifacts: t.record(identifier, manifestEntrySchema), + }) + ), +]); export type ManifestSchema = t.TypeOf; + +export const manifestDispatchSchema = t.intersection([ + manifestBaseSchema, + t.exact( + t.type({ + artifacts: t.record(identifier, manifestEntryDispatchSchema), + }) + ), +]); +export type ManifestDispatchSchema = t.TypeOf; diff --git a/x-pack/plugins/security_solution/server/endpoint/ingest_integration.test.ts b/x-pack/plugins/security_solution/server/endpoint/ingest_integration.test.ts index bb035a19f33d6..be749b2ebd25a 100644 --- a/x-pack/plugins/security_solution/server/endpoint/ingest_integration.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/ingest_integration.test.ts @@ -4,87 +4,122 @@ * you may not use this file except in compliance with the Elastic License. 
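Note on the schema split introduced above: manifestSchema accepts any supported compression_algorithm for stored manifests, while manifestDispatchSchema narrows artifact entries to zlib only, so a manifest is validated as fully compressed before it is attached to a policy. A minimal, self-contained io-ts sketch of that narrowing (the codec names below are illustrative stand-ins, not the real exports):

import * as t from 'io-ts';

// Stored manifests may still reference artifacts that are not compressed yet...
const compressionAlgorithm = t.keyof({ none: null, zlib: null });
// ...but a manifest dispatched to the endpoint package config must only
// reference zlib-compressed artifacts.
const compressionAlgorithmDispatch = t.keyof({ zlib: null });

const entry = t.type({ compression_algorithm: compressionAlgorithm });
const entryDispatch = t.type({ compression_algorithm: compressionAlgorithmDispatch });

// entry accepts both values; entryDispatch rejects 'none', mirroring the
// manifestDispatchSchema.is() check performed in ingest_integration.ts before
// the serialized manifest is written into the package config.
console.log(entry.is({ compression_algorithm: 'none' }));          // true
console.log(entryDispatch.is({ compression_algorithm: 'none' }));  // false
console.log(entryDispatch.is({ compression_algorithm: 'zlib' }));  // true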
*/ -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { loggerMock } from 'src/core/server/logging/logger.mock'; +import { loggingSystemMock } from 'src/core/server/mocks'; import { createNewPackageConfigMock } from '../../../ingest_manager/common/mocks'; import { factory as policyConfigFactory } from '../../common/endpoint/models/policy_config'; -import { getManifestManagerMock } from './services/artifacts/manifest_manager/manifest_manager.mock'; +import { + getManifestManagerMock, + ManifestManagerMockType, +} from './services/artifacts/manifest_manager/manifest_manager.mock'; import { getPackageConfigCreateCallback } from './ingest_integration'; +import { ManifestConstants } from './lib/artifacts'; describe('ingest_integration tests ', () => { describe('ingest_integration sanity checks', () => { - test('policy is updated with manifest', async () => { - const logger = loggerMock.create(); - const manifestManager = getManifestManagerMock(); + test('policy is updated with initial manifest', async () => { + const logger = loggingSystemMock.create().get('ingest_integration.test'); + const manifestManager = getManifestManagerMock({ + mockType: ManifestManagerMockType.InitialSystemState, + }); + const callback = getPackageConfigCreateCallback(logger, manifestManager); - const policyConfig = createNewPackageConfigMock(); - const newPolicyConfig = await callback(policyConfig); + const policyConfig = createNewPackageConfigMock(); // policy config without manifest + const newPolicyConfig = await callback(policyConfig); // policy config WITH manifest + expect(newPolicyConfig.inputs[0]!.type).toEqual('endpoint'); expect(newPolicyConfig.inputs[0]!.config!.policy.value).toEqual(policyConfigFactory()); expect(newPolicyConfig.inputs[0]!.config!.artifact_manifest.value).toEqual({ artifacts: { 'endpoint-exceptionlist-linux-v1': { compression_algorithm: 'zlib', - decoded_sha256: '1a8295e6ccb93022c6f5ceb8997b29f2912389b3b38f52a8f5a2ff7b0154b1bc', - decoded_size: 287, - encoded_sha256: 'c3dec543df1177561ab2aa74a37997ea3c1d748d532a597884f5a5c16670d56c', - encoded_size: 133, + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + decoded_size: 14, + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + encoded_size: 22, + encryption_algorithm: 'none', + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + 'endpoint-exceptionlist-macos-v1': { + compression_algorithm: 'zlib', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + decoded_size: 14, + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + encoded_size: 22, + encryption_algorithm: 'none', + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + 'endpoint-exceptionlist-windows-v1': { + compression_algorithm: 'zlib', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + decoded_size: 14, + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + encoded_size: 22, encryption_algorithm: 'none', relative_url: - '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/1a8295e6ccb93022c6f5ceb8997b29f2912389b3b38f52a8f5a2ff7b0154b1bc', + 
'/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', }, }, - manifest_version: 'WzAsMF0=', + manifest_version: 'a9b7ef358a363f327f479e31efc4f228b2277a7fb4d1914ca9b4e7ca9ffcf537', schema_version: 'v1', }); }); - test('policy is returned even if error is encountered during artifact sync', async () => { - const logger = loggerMock.create(); + test('policy is returned even if error is encountered during artifact creation', async () => { + const logger = loggingSystemMock.create().get('ingest_integration.test'); const manifestManager = getManifestManagerMock(); - manifestManager.syncArtifacts = jest.fn().mockRejectedValue([new Error('error updating')]); - const lastDispatched = await manifestManager.getLastDispatchedManifest(); + manifestManager.pushArtifacts = jest.fn().mockResolvedValue([new Error('error updating')]); + const lastComputed = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const callback = getPackageConfigCreateCallback(logger, manifestManager); const policyConfig = createNewPackageConfigMock(); const newPolicyConfig = await callback(policyConfig); + expect(newPolicyConfig.inputs[0]!.type).toEqual('endpoint'); expect(newPolicyConfig.inputs[0]!.config!.policy.value).toEqual(policyConfigFactory()); expect(newPolicyConfig.inputs[0]!.config!.artifact_manifest.value).toEqual( - lastDispatched.toEndpointFormat() + lastComputed!.toEndpointFormat() ); }); - test('initial policy creation succeeds if snapshot retrieval fails', async () => { - const logger = loggerMock.create(); - const manifestManager = getManifestManagerMock(); - const lastDispatched = await manifestManager.getLastDispatchedManifest(); - manifestManager.getSnapshot = jest.fn().mockResolvedValue(null); + test('initial policy creation succeeds if manifest retrieval fails', async () => { + const logger = loggingSystemMock.create().get('ingest_integration.test'); + const manifestManager = getManifestManagerMock({ + mockType: ManifestManagerMockType.InitialSystemState, + }); + const lastComputed = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + expect(lastComputed).toEqual(null); + + manifestManager.buildNewManifest = jest.fn().mockRejectedValue(new Error('abcd')); const callback = getPackageConfigCreateCallback(logger, manifestManager); const policyConfig = createNewPackageConfigMock(); const newPolicyConfig = await callback(policyConfig); + expect(newPolicyConfig.inputs[0]!.type).toEqual('endpoint'); expect(newPolicyConfig.inputs[0]!.config!.policy.value).toEqual(policyConfigFactory()); - expect(newPolicyConfig.inputs[0]!.config!.artifact_manifest.value).toEqual( - lastDispatched.toEndpointFormat() - ); }); test('subsequent policy creations succeed', async () => { - const logger = loggerMock.create(); + const logger = loggingSystemMock.create().get('ingest_integration.test'); const manifestManager = getManifestManagerMock(); - const snapshot = await manifestManager.getSnapshot(); - manifestManager.getLastDispatchedManifest = jest.fn().mockResolvedValue(snapshot!.manifest); - manifestManager.getSnapshot = jest.fn().mockResolvedValue({ - manifest: snapshot!.manifest, - diffs: [], - }); + const lastComputed = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + + manifestManager.buildNewManifest = jest.fn().mockResolvedValue(lastComputed); // no diffs const callback = getPackageConfigCreateCallback(logger, manifestManager); const 
policyConfig = createNewPackageConfigMock(); const newPolicyConfig = await callback(policyConfig); + expect(newPolicyConfig.inputs[0]!.type).toEqual('endpoint'); expect(newPolicyConfig.inputs[0]!.config!.policy.value).toEqual(policyConfigFactory()); expect(newPolicyConfig.inputs[0]!.config!.artifact_manifest.value).toEqual( - snapshot!.manifest.toEndpointFormat() + lastComputed!.toEndpointFormat() ); }); }); diff --git a/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts b/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts index e2522ac4af778..11d4b12d0b76a 100644 --- a/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts +++ b/x-pack/plugins/security_solution/server/endpoint/ingest_integration.ts @@ -8,9 +8,63 @@ import { Logger } from '../../../../../src/core/server'; import { NewPackageConfig } from '../../../ingest_manager/common/types/models'; import { factory as policyConfigFactory } from '../../common/endpoint/models/policy_config'; import { NewPolicyData } from '../../common/endpoint/types'; -import { ManifestManager, ManifestSnapshot } from './services/artifacts'; +import { ManifestManager } from './services/artifacts'; +import { Manifest } from './lib/artifacts'; import { reportErrors, ManifestConstants } from './lib/artifacts/common'; -import { ManifestSchemaVersion } from '../../common/endpoint/schema/common'; +import { InternalArtifactCompleteSchema } from './schemas/artifacts'; +import { manifestDispatchSchema } from '../../common/endpoint/schema/manifest'; + +const getManifest = async (logger: Logger, manifestManager: ManifestManager): Promise => { + let manifest: Manifest | null = null; + + try { + manifest = await manifestManager.getLastComputedManifest(ManifestConstants.SCHEMA_VERSION); + + // If we have not yet computed a manifest, then we have to do so now. This should only happen + // once. + if (manifest == null) { + // New computed manifest based on current state of exception list + const newManifest = await manifestManager.buildNewManifest(ManifestConstants.SCHEMA_VERSION); + const diffs = newManifest.diff(Manifest.getDefault(ManifestConstants.SCHEMA_VERSION)); + + // Compress new artifacts + const adds = diffs.filter((diff) => diff.type === 'add').map((diff) => diff.id); + for (const artifactId of adds) { + const compressError = await newManifest.compressArtifact(artifactId); + if (compressError) { + throw compressError; + } + } + + // Persist new artifacts + const artifacts = adds + .map((artifactId) => newManifest.getArtifact(artifactId)) + .filter((artifact): artifact is InternalArtifactCompleteSchema => artifact !== undefined); + if (artifacts.length !== adds.length) { + throw new Error('Invalid artifact encountered.'); + } + const persistErrors = await manifestManager.pushArtifacts(artifacts); + if (persistErrors.length) { + reportErrors(logger, persistErrors); + throw new Error('Unable to persist new artifacts.'); + } + + // Commit the manifest state + if (diffs.length) { + const error = await manifestManager.commit(newManifest); + if (error) { + throw error; + } + } + + manifest = newManifest; + } + } catch (err) { + logger.error(err); + } + + return manifest ?? 
Manifest.getDefault(ManifestConstants.SCHEMA_VERSION); +}; /** * Callback to handle creation of PackageConfigs in Ingest Manager @@ -31,85 +85,37 @@ export const getPackageConfigCreateCallback = ( // follow the types/schema expected let updatedPackageConfig = newPackageConfig as NewPolicyData; - // get current manifest from SO (last dispatched) - const manifest = ( - await manifestManager.getLastDispatchedManifest(ManifestConstants.SCHEMA_VERSION) - )?.toEndpointFormat() ?? { - manifest_version: 'default', - schema_version: ManifestConstants.SCHEMA_VERSION as ManifestSchemaVersion, - artifacts: {}, - }; + // Get most recent manifest + const manifest = await getManifest(logger, manifestManager); + const serializedManifest = manifest.toEndpointFormat(); + if (!manifestDispatchSchema.is(serializedManifest)) { + // This should not happen. + // But if it does, we log it and return it anyway. + logger.error('Invalid manifest'); + } // Until we get the Default Policy Configuration in the Endpoint package, // we will add it here manually at creation time. - if (newPackageConfig.inputs.length === 0) { - updatedPackageConfig = { - ...newPackageConfig, - inputs: [ - { - type: 'endpoint', - enabled: true, - streams: [], - config: { - artifact_manifest: { - value: manifest, - }, - policy: { - value: policyConfigFactory(), - }, + updatedPackageConfig = { + ...newPackageConfig, + inputs: [ + { + type: 'endpoint', + enabled: true, + streams: [], + config: { + artifact_manifest: { + value: serializedManifest, + }, + policy: { + value: policyConfigFactory(), }, }, - ], - }; - } - - let snapshot: ManifestSnapshot | null = null; - let success = true; - try { - // Try to get most up-to-date manifest data. - - // get snapshot based on exception-list-agnostic SOs - // with diffs from last dispatched manifest, if it exists - snapshot = await manifestManager.getSnapshot({ initialize: true }); - - if (snapshot && snapshot.diffs.length) { - // create new artifacts - const errors = await manifestManager.syncArtifacts(snapshot, 'add'); - if (errors.length) { - reportErrors(logger, errors); - throw new Error('Error writing new artifacts.'); - } - } - - if (snapshot) { - updatedPackageConfig.inputs[0].config.artifact_manifest = { - value: snapshot.manifest.toEndpointFormat(), - }; - } - - return updatedPackageConfig; - } catch (err) { - success = false; - logger.error(err); - return updatedPackageConfig; - } finally { - if (success && snapshot !== null) { - try { - if (snapshot.diffs.length > 0) { - // TODO: let's revisit the way this callback happens... use promises? - // only commit when we know the package config was created - await manifestManager.commit(snapshot.manifest); + }, + ], + }; - // clean up old artifacts - await manifestManager.syncArtifacts(snapshot, 'delete'); - } - } catch (err) { - logger.error(err); - } - } else if (snapshot === null) { - logger.error('No manifest snapshot available.'); - } - } + return updatedPackageConfig; }; return handlePackageConfigCreate; diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts index 77a5e85b14199..7298a9bfa72a6 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/common.ts @@ -4,6 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
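The identity scheme added in common.ts and manifest.ts is what makes "only write when there are changes" cheap to detect: each artifact id is its identifier plus its decoded SHA-256, and the manifest_version reported to the endpoint is now a hash over the sorted artifact ids rather than the saved-object version. A condensed sketch of that behaviour, using placeholder hashes:

import { createHash } from 'crypto';

// getArtifactId: artifacts are content-addressed by identifier + decoded hash.
const getArtifactId = (artifact: { identifier: string; decodedSha256: string }) =>
  `${artifact.identifier}-${artifact.decodedSha256}`;

// Manifest.getSha256: hash the sorted artifact ids, so the resulting
// manifest_version changes only when the set of artifacts changes.
const manifestVersion = (artifactIds: string[]): string => {
  let sha256 = createHash('sha256');
  [...artifactIds].sort().forEach((id) => {
    sha256 = sha256.update(id);
  });
  return sha256.digest('hex');
};

const ids = [
  getArtifactId({ identifier: 'endpoint-exceptionlist-linux-v1', decodedSha256: 'aaa' }),
  getArtifactId({ identifier: 'endpoint-exceptionlist-macos-v1', decodedSha256: 'bbb' }),
];
// The same artifacts in a different order produce the same version, so the
// package config is not rewritten when nothing actually changed.
console.log(manifestVersion(ids) === manifestVersion([...ids].reverse())); // true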
*/ import { Logger } from 'src/core/server'; +import { + InternalArtifactSchema, + InternalArtifactCompleteSchema, + internalArtifactCompleteSchema, +} from '../../schemas/artifacts'; export const ArtifactConstants = { GLOBAL_ALLOWLIST_NAME: 'endpoint-exceptionlist', @@ -15,7 +20,16 @@ export const ArtifactConstants = { export const ManifestConstants = { SAVED_OBJECT_TYPE: 'endpoint:user-artifact-manifest', SCHEMA_VERSION: 'v1', - INITIAL_VERSION: 'WzAsMF0=', +}; + +export const getArtifactId = (artifact: InternalArtifactSchema) => { + return `${artifact.identifier}-${artifact.decodedSha256}`; +}; + +export const isCompleteArtifact = ( + artifact: InternalArtifactSchema +): artifact is InternalArtifactCompleteSchema => { + return internalArtifactCompleteSchema.is(artifact); }; export const reportErrors = (logger: Logger, errors: Error[]) => { diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.test.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.test.ts index 1a19306b2fd60..d3d073efa73c1 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.test.ts @@ -9,7 +9,8 @@ import { listMock } from '../../../../../lists/server/mocks'; import { getFoundExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/found_exception_list_item_schema.mock'; import { getExceptionListItemSchemaMock } from '../../../../../lists/common/schemas/response/exception_list_item_schema.mock'; import { EntriesArray, EntryList } from '../../../../../lists/common/schemas/types/entries'; -import { getFullEndpointExceptionList } from './lists'; +import { buildArtifact, getFullEndpointExceptionList } from './lists'; +import { TranslatedEntry, TranslatedExceptionListItem } from '../../schemas/artifacts'; describe('buildEventTypeSignal', () => { let mockExceptionClient: ExceptionListClient; @@ -340,4 +341,95 @@ describe('buildEventTypeSignal', () => { const resp = await getFullEndpointExceptionList(mockExceptionClient, 'linux', 'v1'); expect(resp.entries.length).toEqual(0); }); + + test('it should return a stable hash regardless of order of entries', async () => { + const translatedEntries: TranslatedEntry[] = [ + { + entries: [ + { + field: 'some.nested.field', + operator: 'included', + type: 'exact_cased', + value: 'some value', + }, + ], + field: 'some.parentField', + type: 'nested', + }, + { + field: 'nested.field', + operator: 'included', + type: 'exact_cased', + value: 'some value', + }, + ]; + const translatedEntriesReversed = translatedEntries.reverse(); + + const translatedExceptionList = { + entries: [ + { + type: 'simple', + entries: translatedEntries, + }, + ], + }; + + const translatedExceptionListReversed = { + entries: [ + { + type: 'simple', + entries: translatedEntriesReversed, + }, + ], + }; + + const artifact1 = await buildArtifact(translatedExceptionList, 'linux', 'v1'); + const artifact2 = await buildArtifact(translatedExceptionListReversed, 'linux', 'v1'); + expect(artifact1.decodedSha256).toEqual(artifact2.decodedSha256); + }); + + test('it should return a stable hash regardless of order of items', async () => { + const translatedItems: TranslatedExceptionListItem[] = [ + { + type: 'simple', + entries: [ + { + entries: [ + { + field: 'some.nested.field', + operator: 'included', + type: 'exact_cased', + value: 'some value', + }, + ], + field: 'some.parentField', + type: 'nested', + }, + ], + }, + { + type: 'simple', + 
entries: [ + { + field: 'nested.field', + operator: 'included', + type: 'exact_cased', + value: 'some value', + }, + ], + }, + ]; + + const translatedExceptionList = { + entries: translatedItems, + }; + + const translatedExceptionListReversed = { + entries: translatedItems.reverse(), + }; + + const artifact1 = await buildArtifact(translatedExceptionList, 'linux', 'v1'); + const artifact2 = await buildArtifact(translatedExceptionListReversed, 'linux', 'v1'); + expect(artifact1.decodedSha256).toEqual(artifact2.decodedSha256); + }); }); diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.ts index e6fd4bad97c5f..68fa2a0511a48 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/lists.ts @@ -25,6 +25,8 @@ import { translatedEntryMatchMatcher, translatedEntryMatchAnyMatcher, TranslatedExceptionListItem, + internalArtifactCompleteSchema, + InternalArtifactCompleteSchema, } from '../../schemas'; import { ArtifactConstants } from './common'; @@ -32,7 +34,7 @@ export async function buildArtifact( exceptions: WrappedTranslatedExceptionList, os: string, schemaVersion: string -): Promise { +): Promise { const exceptionsBuffer = Buffer.from(JSON.stringify(exceptions)); const sha256 = createHash('sha256').update(exceptionsBuffer.toString()).digest('hex'); @@ -45,11 +47,32 @@ export async function buildArtifact( encodedSha256: sha256, decodedSize: exceptionsBuffer.byteLength, encodedSize: exceptionsBuffer.byteLength, - created: Date.now(), body: exceptionsBuffer.toString('base64'), }; } +export async function maybeCompressArtifact( + uncompressedArtifact: InternalArtifactSchema +): Promise { + const compressedArtifact = { ...uncompressedArtifact }; + if (internalArtifactCompleteSchema.is(uncompressedArtifact)) { + const compressedExceptionList = await compressExceptionList( + Buffer.from(uncompressedArtifact.body, 'base64') + ); + compressedArtifact.body = compressedExceptionList.toString('base64'); + compressedArtifact.encodedSize = compressedExceptionList.byteLength; + compressedArtifact.compressionAlgorithm = 'zlib'; + compressedArtifact.encodedSha256 = createHash('sha256') + .update(compressedExceptionList) + .digest('hex'); + } + return compressedArtifact; +} + +export function isCompressed(artifact: InternalArtifactSchema) { + return artifact.compressionAlgorithm === 'zlib'; +} + export async function getFullEndpointExceptionList( eClient: ExceptionListClient, os: string, @@ -136,7 +159,7 @@ function translateItem( const itemSet = new Set(); return { type: item.type, - entries: item.entries.reduce((translatedEntries: TranslatedEntry[], entry) => { + entries: item.entries.reduce((translatedEntries, entry) => { const translatedEntry = translateEntry(schemaVersion, entry); if (translatedEntry !== undefined && translatedEntryType.is(translatedEntry)) { const itemHash = createHash('sha256').update(JSON.stringify(translatedEntry)).digest('hex'); @@ -156,8 +179,8 @@ function translateEntry( ): TranslatedEntry | undefined { switch (entry.type) { case 'nested': { - const nestedEntries = entry.entries.reduce( - (entries: TranslatedEntryNestedEntry[], nestedEntry) => { + const nestedEntries = entry.entries.reduce( + (entries, nestedEntry) => { const translatedEntry = translateEntry(schemaVersion, nestedEntry); if (nestedEntry !== undefined && translatedEntryNestedEntry.is(translatedEntry)) { 
entries.push(translatedEntry); diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.test.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.test.ts index e1f6bac2620ea..95587c6fc105d 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.test.ts @@ -5,103 +5,125 @@ */ import { ManifestSchemaVersion } from '../../../../common/endpoint/schema/common'; -import { InternalArtifactSchema } from '../../schemas'; -import { - getInternalArtifactMock, - getInternalArtifactMockWithDiffs, -} from '../../schemas/artifacts/saved_objects.mock'; -import { ManifestConstants } from './common'; +import { InternalArtifactCompleteSchema } from '../../schemas'; +import { ManifestConstants, getArtifactId } from './common'; import { Manifest } from './manifest'; +import { + getMockArtifacts, + getMockManifest, + getMockManifestWithDiffs, + getEmptyMockManifest, +} from './mocks'; describe('manifest', () => { describe('Manifest object sanity checks', () => { - const artifacts: InternalArtifactSchema[] = []; - const now = new Date(); + let artifacts: InternalArtifactCompleteSchema[] = []; let manifest1: Manifest; let manifest2: Manifest; + let emptyManifest: Manifest; beforeAll(async () => { - const artifactLinux = await getInternalArtifactMock('linux', 'v1'); - const artifactMacos = await getInternalArtifactMock('macos', 'v1'); - const artifactWindows = await getInternalArtifactMock('windows', 'v1'); - artifacts.push(artifactLinux); - artifacts.push(artifactMacos); - artifacts.push(artifactWindows); - - manifest1 = new Manifest(now, 'v1', ManifestConstants.INITIAL_VERSION); - manifest1.addEntry(artifactLinux); - manifest1.addEntry(artifactMacos); - manifest1.addEntry(artifactWindows); - manifest1.setVersion('abcd'); - - const newArtifactLinux = await getInternalArtifactMockWithDiffs('linux', 'v1'); - manifest2 = new Manifest(new Date(), 'v1', ManifestConstants.INITIAL_VERSION); - manifest2.addEntry(newArtifactLinux); - manifest2.addEntry(artifactMacos); - manifest2.addEntry(artifactWindows); + artifacts = await getMockArtifacts({ compress: true }); + manifest1 = await getMockManifest({ compress: true }); + manifest2 = await getMockManifestWithDiffs({ compress: true }); + emptyManifest = await getEmptyMockManifest({ compress: true }); }); test('Can create manifest with valid schema version', () => { - const manifest = new Manifest(new Date(), 'v1', ManifestConstants.INITIAL_VERSION); + const manifest = new Manifest('v1'); expect(manifest).toBeInstanceOf(Manifest); }); test('Cannot create manifest with invalid schema version', () => { expect(() => { - new Manifest( - new Date(), - 'abcd' as ManifestSchemaVersion, - ManifestConstants.INITIAL_VERSION - ); + new Manifest('abcd' as ManifestSchemaVersion); }).toThrow(); }); + test('Empty manifest transforms correctly to expected endpoint format', async () => { + expect(emptyManifest.toEndpointFormat()).toStrictEqual({ + artifacts: { + 'endpoint-exceptionlist-linux-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + decoded_size: 14, + encoded_size: 22, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + 
'endpoint-exceptionlist-macos-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + decoded_size: 14, + encoded_size: 22, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + 'endpoint-exceptionlist-windows-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: 'd801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + encoded_sha256: 'f8e6afa1d5662f5b37f83337af774b5785b5b7f1daee08b7b00c2d6813874cda', + decoded_size: 14, + encoded_size: 22, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/d801aa1fb7ddcc330a5e3173372ea6af4a3d08ec58074478e85aa5603e926658', + }, + }, + manifest_version: 'a9b7ef358a363f327f479e31efc4f228b2277a7fb4d1914ca9b4e7ca9ffcf537', + schema_version: 'v1', + }); + }); + test('Manifest transforms correctly to expected endpoint format', async () => { expect(manifest1.toEndpointFormat()).toStrictEqual({ artifacts: { 'endpoint-exceptionlist-linux-v1': { - compression_algorithm: 'none', + compression_algorithm: 'zlib', encryption_algorithm: 'none', - decoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - encoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - decoded_size: 430, - encoded_size: 430, + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, relative_url: - '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', }, 'endpoint-exceptionlist-macos-v1': { - compression_algorithm: 'none', + compression_algorithm: 'zlib', encryption_algorithm: 'none', - decoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - encoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - decoded_size: 430, - encoded_size: 430, + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, relative_url: - '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', }, 'endpoint-exceptionlist-windows-v1': { - compression_algorithm: 'none', + compression_algorithm: 'zlib', encryption_algorithm: 'none', - decoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - encoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - decoded_size: 430, - encoded_size: 430, + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, relative_url: - 
'/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', }, }, - manifest_version: 'abcd', + manifest_version: 'a7f4760bfa2662e85e30fe4fb8c01b4c4a20938c76ab21d3c5a3e781e547cce7', schema_version: 'v1', }); }); test('Manifest transforms correctly to expected saved object format', async () => { expect(manifest1.toSavedObject()).toStrictEqual({ - created: now.getTime(), ids: [ - 'endpoint-exceptionlist-linux-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - 'endpoint-exceptionlist-macos-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - 'endpoint-exceptionlist-windows-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + 'endpoint-exceptionlist-linux-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + 'endpoint-exceptionlist-macos-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + 'endpoint-exceptionlist-windows-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', ], }); }); @@ -111,12 +133,12 @@ describe('manifest', () => { expect(diffs).toEqual([ { id: - 'endpoint-exceptionlist-linux-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + 'endpoint-exceptionlist-linux-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', type: 'delete', }, { id: - 'endpoint-exceptionlist-linux-v1-3d3546e94f70493021ee845be32c66e36ea7a720c64b4d608d8029fe949f7e51', + 'endpoint-exceptionlist-linux-v1-0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', type: 'add', }, ]); @@ -124,7 +146,7 @@ describe('manifest', () => { test('Manifest returns data for given artifact', async () => { const artifact = artifacts[0]; - const returned = manifest1.getArtifact(`${artifact.identifier}-${artifact.decodedSha256}`); + const returned = manifest1.getArtifact(getArtifactId(artifact)); expect(returned).toEqual(artifact); }); @@ -132,39 +154,35 @@ describe('manifest', () => { const entries = manifest1.getEntries(); const keys = Object.keys(entries); expect(keys).toEqual([ - 'endpoint-exceptionlist-linux-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - 'endpoint-exceptionlist-macos-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - 'endpoint-exceptionlist-windows-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + 'endpoint-exceptionlist-linux-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + 'endpoint-exceptionlist-macos-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + 'endpoint-exceptionlist-windows-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', ]); }); test('Manifest returns true if contains artifact', async () => { const found = manifest1.contains( - 'endpoint-exceptionlist-macos-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + 'endpoint-exceptionlist-macos-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ); expect(found).toEqual(true); }); test('Manifest can be created from list of artifacts', async () => { - const oldManifest = new Manifest( - new Date(), - ManifestConstants.SCHEMA_VERSION, - ManifestConstants.INITIAL_VERSION - ); + const oldManifest = new Manifest(ManifestConstants.SCHEMA_VERSION); const manifest = Manifest.fromArtifacts(artifacts, 'v1', oldManifest); expect( 
manifest.contains( - 'endpoint-exceptionlist-linux-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + 'endpoint-exceptionlist-linux-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ) ).toEqual(true); expect( manifest.contains( - 'endpoint-exceptionlist-macos-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + 'endpoint-exceptionlist-macos-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ) ).toEqual(true); expect( manifest.contains( - 'endpoint-exceptionlist-windows-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + 'endpoint-exceptionlist-windows-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ) ).toEqual(true); }); diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.ts index 576ecb08d6923..6ece2bf0f48e8 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest.ts @@ -4,15 +4,22 @@ * you may not use this file except in compliance with the Elastic License. */ +import { createHash } from 'crypto'; import { validate } from '../../../../common/validate'; -import { InternalArtifactSchema, InternalManifestSchema } from '../../schemas/artifacts'; +import { + InternalArtifactSchema, + InternalManifestSchema, + internalArtifactCompleteSchema, + InternalArtifactCompleteSchema, +} from '../../schemas/artifacts'; import { manifestSchemaVersion, ManifestSchemaVersion, } from '../../../../common/endpoint/schema/common'; import { ManifestSchema, manifestSchema } from '../../../../common/endpoint/schema/manifest'; -import { ManifestConstants } from './common'; import { ManifestEntry } from './manifest_entry'; +import { maybeCompressArtifact, isCompressed } from './lists'; +import { getArtifactId } from './common'; export interface ManifestDiff { type: string; @@ -20,15 +27,13 @@ export interface ManifestDiff { } export class Manifest { - private created: Date; private entries: Record; private schemaVersion: ManifestSchemaVersion; // For concurrency control - private version: string; + private version: string | undefined; - constructor(created: Date, schemaVersion: string, version: string) { - this.created = created; + constructor(schemaVersion: string, version?: string) { this.entries = {}; this.version = version; @@ -38,20 +43,24 @@ export class Manifest { ); if (errors != null || validated === null) { - throw new Error(`Invalid manifest version: ${schemaVersion}`); + throw new Error(`Invalid manifest schema version: ${schemaVersion}`); } this.schemaVersion = validated; } + public static getDefault(schemaVersion: string) { + return new Manifest(schemaVersion); + } + public static fromArtifacts( - artifacts: InternalArtifactSchema[], + artifacts: InternalArtifactCompleteSchema[], schemaVersion: string, oldManifest: Manifest ): Manifest { - const manifest = new Manifest(new Date(), schemaVersion, oldManifest.getVersion()); + const manifest = new Manifest(schemaVersion, oldManifest.getSoVersion()); artifacts.forEach((artifact) => { - const id = `${artifact.identifier}-${artifact.decodedSha256}`; + const id = getArtifactId(artifact); const existingArtifact = oldManifest.getArtifact(id); if (existingArtifact) { manifest.addEntry(existingArtifact); @@ -62,15 +71,70 @@ export class Manifest { return manifest; } + public static fromPkgConfig(manifestPkgConfig: ManifestSchema): 
Manifest | null { + if (manifestSchema.is(manifestPkgConfig)) { + const manifest = new Manifest(manifestPkgConfig.schema_version); + for (const [identifier, artifactRecord] of Object.entries(manifestPkgConfig.artifacts)) { + const artifact = { + identifier, + compressionAlgorithm: artifactRecord.compression_algorithm, + encryptionAlgorithm: artifactRecord.encryption_algorithm, + decodedSha256: artifactRecord.decoded_sha256, + decodedSize: artifactRecord.decoded_size, + encodedSha256: artifactRecord.encoded_sha256, + encodedSize: artifactRecord.encoded_size, + }; + manifest.addEntry(artifact); + } + return manifest; + } else { + return null; + } + } + + public async compressArtifact(id: string): Promise { + try { + const artifact = this.getArtifact(id); + if (artifact == null) { + throw new Error(`Corrupted manifest detected. Artifact ${id} not in manifest.`); + } + + const compressedArtifact = await maybeCompressArtifact(artifact); + if (!isCompressed(compressedArtifact)) { + throw new Error(`Unable to compress artifact: ${id}`); + } else if (!internalArtifactCompleteSchema.is(compressedArtifact)) { + throw new Error(`Incomplete artifact detected: ${id}`); + } + this.addEntry(compressedArtifact); + } catch (err) { + return err; + } + return null; + } + + public equals(manifest: Manifest): boolean { + return this.getSha256() === manifest.getSha256(); + } + + public getSha256(): string { + let sha256 = createHash('sha256'); + Object.keys(this.entries) + .sort() + .forEach((docId) => { + sha256 = sha256.update(docId); + }); + return sha256.digest('hex'); + } + public getSchemaVersion(): ManifestSchemaVersion { return this.schemaVersion; } - public getVersion(): string { + public getSoVersion(): string | undefined { return this.version; } - public setVersion(version: string) { + public setSoVersion(version: string) { this.version = version; } @@ -87,8 +151,12 @@ export class Manifest { return this.entries; } + public getEntry(artifactId: string): ManifestEntry | undefined { + return this.entries[artifactId]; + } + public getArtifact(artifactId: string): InternalArtifactSchema | undefined { - return this.entries[artifactId]?.getArtifact(); + return this.getEntry(artifactId)?.getArtifact(); } public diff(manifest: Manifest): ManifestDiff[] { @@ -111,7 +179,7 @@ export class Manifest { public toEndpointFormat(): ManifestSchema { const manifestObj: ManifestSchema = { - manifest_version: this.version ?? 
ManifestConstants.INITIAL_VERSION, + manifest_version: this.getSha256(), schema_version: this.schemaVersion, artifacts: {}, }; @@ -130,7 +198,6 @@ export class Manifest { public toSavedObject(): InternalManifestSchema { return { - created: this.created.getTime(), ids: Object.keys(this.entries), }; } diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.test.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.test.ts index 7ea2a07210c55..d7bd57547de0a 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.test.ts @@ -14,7 +14,7 @@ describe('manifest_entry', () => { let manifestEntry: ManifestEntry; beforeAll(async () => { - artifact = await getInternalArtifactMock('windows', 'v1'); + artifact = await getInternalArtifactMock('windows', 'v1', { compress: true }); manifestEntry = new ManifestEntry(artifact); }); @@ -24,7 +24,7 @@ describe('manifest_entry', () => { test('Correct doc_id is returned', () => { expect(manifestEntry.getDocId()).toEqual( - 'endpoint-exceptionlist-windows-v1-5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + 'endpoint-exceptionlist-windows-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ); }); @@ -34,21 +34,21 @@ describe('manifest_entry', () => { test('Correct sha256 is returned', () => { expect(manifestEntry.getEncodedSha256()).toEqual( - '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e' ); expect(manifestEntry.getDecodedSha256()).toEqual( - '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ); }); test('Correct size is returned', () => { - expect(manifestEntry.getEncodedSize()).toEqual(430); - expect(manifestEntry.getDecodedSize()).toEqual(430); + expect(manifestEntry.getEncodedSize()).toEqual(147); + expect(manifestEntry.getDecodedSize()).toEqual(432); }); test('Correct url is returned', () => { expect(manifestEntry.getUrl()).toEqual( - '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735' + '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3' ); }); @@ -58,17 +58,15 @@ describe('manifest_entry', () => { test('Correct record is returned', () => { expect(manifestEntry.getRecord()).toEqual({ - compression_algorithm: 'none', + compression_algorithm: 'zlib', encryption_algorithm: 'none', - decoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - encoded_sha256: '5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', - decoded_size: 430, - encoded_size: 430, + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, relative_url: - '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/5f16e5e338c53e77cfa945c17c11b175c3967bf109aa87131de41fb93b149735', + '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', }); }); - - // TODO: add test for entry with compression }); }); diff --git 
a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.ts index b35e0c2b9ad6e..b6c103e24f024 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/manifest_entry.ts @@ -7,6 +7,7 @@ import { InternalArtifactSchema } from '../../schemas/artifacts'; import { CompressionAlgorithm } from '../../../../common/endpoint/schema/common'; import { ManifestEntrySchema } from '../../../../common/endpoint/schema/manifest'; +import { getArtifactId } from './common'; export class ManifestEntry { private artifact: InternalArtifactSchema; @@ -16,7 +17,7 @@ export class ManifestEntry { } public getDocId(): string { - return `${this.getIdentifier()}-${this.getDecodedSha256()}`; + return getArtifactId(this.artifact); } public getIdentifier(): string { diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/mocks.ts b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/mocks.ts new file mode 100644 index 0000000000000..097151ee835ba --- /dev/null +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/mocks.ts @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { InternalArtifactCompleteSchema } from '../../schemas/artifacts'; +import { + getInternalArtifactMock, + getInternalArtifactMockWithDiffs, + getEmptyInternalArtifactMock, +} from '../../schemas/artifacts/saved_objects.mock'; +import { ArtifactConstants } from './common'; +import { Manifest } from './manifest'; + +export const getMockArtifacts = async (opts?: { compress: boolean }) => { + return Promise.all( + ArtifactConstants.SUPPORTED_OPERATING_SYSTEMS.map>( + async (os) => { + return getInternalArtifactMock(os, 'v1', opts); + } + ) + ); +}; + +export const getMockArtifactsWithDiff = async (opts?: { compress: boolean }) => { + return Promise.all( + ArtifactConstants.SUPPORTED_OPERATING_SYSTEMS.map>( + async (os) => { + if (os === 'linux') { + return getInternalArtifactMockWithDiffs(os, 'v1'); + } + return getInternalArtifactMock(os, 'v1', opts); + } + ) + ); +}; + +export const getEmptyMockArtifacts = async (opts?: { compress: boolean }) => { + return Promise.all( + ArtifactConstants.SUPPORTED_OPERATING_SYSTEMS.map>( + async (os) => { + return getEmptyInternalArtifactMock(os, 'v1', opts); + } + ) + ); +}; + +export const getMockManifest = async (opts?: { compress: boolean }) => { + const manifest = new Manifest('v1'); + const artifacts = await getMockArtifacts(opts); + artifacts.forEach((artifact) => manifest.addEntry(artifact)); + return manifest; +}; + +export const getMockManifestWithDiffs = async (opts?: { compress: boolean }) => { + const manifest = new Manifest('v1'); + const artifacts = await getMockArtifactsWithDiff(opts); + artifacts.forEach((artifact) => manifest.addEntry(artifact)); + return manifest; +}; + +export const getEmptyMockManifest = async (opts?: { compress: boolean }) => { + const manifest = new Manifest('v1'); + const artifacts = await getEmptyMockArtifacts(opts); + artifacts.forEach((artifact) => manifest.addEntry(artifact)); + return manifest; +}; diff --git a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/task.ts 
b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/task.ts index 583f4499f591b..ba164059866ea 100644 --- a/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/task.ts +++ b/x-pack/plugins/security_solution/server/endpoint/lib/artifacts/task.ts @@ -11,7 +11,8 @@ import { TaskManagerStartContract, } from '../../../../../task_manager/server'; import { EndpointAppContext } from '../../types'; -import { reportErrors } from './common'; +import { reportErrors, ManifestConstants } from './common'; +import { InternalArtifactCompleteSchema } from '../../schemas/artifacts'; export const ManifestTaskConstants = { TIMEOUT: '1m', @@ -89,37 +90,66 @@ export class ManifestTask { return; } - let errors: Error[] = []; try { - // get snapshot based on exception-list-agnostic SOs - // with diffs from last dispatched manifest - const snapshot = await manifestManager.getSnapshot(); - if (snapshot && snapshot.diffs.length > 0) { - // create new artifacts - errors = await manifestManager.syncArtifacts(snapshot, 'add'); - if (errors.length) { - reportErrors(this.logger, errors); - throw new Error('Error writing new artifacts.'); - } - // write to ingest-manager package config - errors = await manifestManager.dispatch(snapshot.manifest); - if (errors.length) { - reportErrors(this.logger, errors); - throw new Error('Error dispatching manifest.'); + // Last manifest we computed, which was saved to ES + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + if (oldManifest == null) { + this.logger.debug('User manifest not available yet.'); + return; + } + + // New computed manifest based on current state of exception list + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest + ); + const diffs = newManifest.diff(oldManifest); + + // Compress new artifacts + const adds = diffs.filter((diff) => diff.type === 'add').map((diff) => diff.id); + for (const artifactId of adds) { + const compressError = await newManifest.compressArtifact(artifactId); + if (compressError) { + throw compressError; } - // commit latest manifest state to user-artifact-manifest SO - const error = await manifestManager.commit(snapshot.manifest); + } + + // Persist new artifacts + const artifacts = adds + .map((artifactId) => newManifest.getArtifact(artifactId)) + .filter((artifact): artifact is InternalArtifactCompleteSchema => artifact !== undefined); + if (artifacts.length !== adds.length) { + throw new Error('Invalid artifact encountered.'); + } + const persistErrors = await manifestManager.pushArtifacts(artifacts); + if (persistErrors.length) { + reportErrors(this.logger, persistErrors); + throw new Error('Unable to persist new artifacts.'); + } + + // Commit latest manifest state, if different + if (diffs.length) { + const error = await manifestManager.commit(newManifest); if (error) { - reportErrors(this.logger, [error]); - throw new Error('Error committing manifest.'); - } - // clean up old artifacts - errors = await manifestManager.syncArtifacts(snapshot, 'delete'); - if (errors.length) { - reportErrors(this.logger, errors); - throw new Error('Error cleaning up outdated artifacts.'); + throw error; } } + + // Try dispatching to ingest-manager package configs + const dispatchErrors = await manifestManager.tryDispatch(newManifest); + if (dispatchErrors.length) { + reportErrors(this.logger, dispatchErrors); + throw new Error('Error dispatching manifest.'); + } + + // Try to clean up superseded artifacts +
const deletes = diffs.filter((diff) => diff.type === 'delete').map((diff) => diff.id); + const deleteErrors = await manifestManager.deleteArtifacts(deletes); + if (deleteErrors.length) { + reportErrors(this.logger, deleteErrors); + } } catch (err) { this.logger.error(err); } diff --git a/x-pack/plugins/security_solution/server/endpoint/mocks.ts b/x-pack/plugins/security_solution/server/endpoint/mocks.ts index 6a8c26e08d9dd..9ca447d53bf45 100644 --- a/x-pack/plugins/security_solution/server/endpoint/mocks.ts +++ b/x-pack/plugins/security_solution/server/endpoint/mocks.ts @@ -6,8 +6,6 @@ import { ILegacyScopedClusterClient, SavedObjectsClientContract } from 'kibana/server'; import { loggingSystemMock, savedObjectsServiceMock } from 'src/core/server/mocks'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { loggerMock } from 'src/core/server/logging/logger.mock'; import { xpackMocks } from '../../../../mocks'; import { AgentService, @@ -15,28 +13,24 @@ import { ExternalCallback, } from '../../../ingest_manager/server'; import { createPackageConfigServiceMock } from '../../../ingest_manager/server/mocks'; -import { ConfigType } from '../config'; import { createMockConfig } from '../lib/detection_engine/routes/__mocks__'; import { EndpointAppContextService, EndpointAppContextServiceStartContract, } from './endpoint_app_context_services'; -import { - ManifestManagerMock, - getManifestManagerMock, -} from './services/artifacts/manifest_manager/manifest_manager.mock'; +import { ManifestManager } from './services/artifacts/manifest_manager/manifest_manager'; +import { getManifestManagerMock } from './services/artifacts/manifest_manager/manifest_manager.mock'; import { EndpointAppContext } from './types'; /** * Creates a mocked EndpointAppContext. */ export const createMockEndpointAppContext = ( - mockManifestManager?: ManifestManagerMock + mockManifestManager?: ManifestManager ): EndpointAppContext => { return { logFactory: loggingSystemMock.create(), - // @ts-ignore - config: createMockConfig() as ConfigType, + config: () => Promise.resolve(createMockConfig()), service: createMockEndpointAppContextService(mockManifestManager), }; }; @@ -45,16 +39,15 @@ export const createMockEndpointAppContext = ( * Creates a mocked EndpointAppContextService */ export const createMockEndpointAppContextService = ( - mockManifestManager?: ManifestManagerMock + mockManifestManager?: ManifestManager ): jest.Mocked => { - return { + return ({ start: jest.fn(), stop: jest.fn(), getAgentService: jest.fn(), - // @ts-ignore - getManifestManager: mockManifestManager ?? jest.fn(), + getManifestManager: jest.fn().mockReturnValue(mockManifestManager ?? 
jest.fn()), getScopedSavedObjectsClient: jest.fn(), - }; + } as unknown) as jest.Mocked; }; /** @@ -65,7 +58,7 @@ export const createMockEndpointAppContextServiceStartContract = (): jest.Mocked< > => { return { agentService: createMockAgentService(), - logger: loggerMock.create(), + logger: loggingSystemMock.create().get('mock_endpoint_app_context'), savedObjectsStart: savedObjectsServiceMock.createStartContract(), manifestManager: getManifestManagerMock(), registerIngestCallback: jest.fn< diff --git a/x-pack/plugins/security_solution/server/endpoint/routes/artifacts/download_exception_list.ts b/x-pack/plugins/security_solution/server/endpoint/routes/artifacts/download_exception_list.ts index 1b364a04a4272..218f7c059da48 100644 --- a/x-pack/plugins/security_solution/server/endpoint/routes/artifacts/download_exception_list.ts +++ b/x-pack/plugins/security_solution/server/endpoint/routes/artifacts/download_exception_list.ts @@ -20,7 +20,7 @@ import { DownloadArtifactRequestParamsSchema, downloadArtifactRequestParamsSchema, downloadArtifactResponseSchema, - InternalArtifactSchema, + InternalArtifactCompleteSchema, } from '../../schemas/artifacts'; import { EndpointAppContext } from '../../types'; @@ -86,8 +86,8 @@ export function registerDownloadExceptionListRoute( } else { logger.debug(`Cache MISS artifact ${id}`); return scopedSOClient - .get(ArtifactConstants.SAVED_OBJECT_TYPE, id) - .then((artifact: SavedObject) => { + .get(ArtifactConstants.SAVED_OBJECT_TYPE, id) + .then((artifact: SavedObject) => { const body = Buffer.from(artifact.attributes.body, 'base64'); cache.set(id, body); return buildAndValidateResponse(artifact.attributes.identifier, body); diff --git a/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/lists.mock.ts b/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/lists.mock.ts index 343b192163479..2cef1f3be69c1 100644 --- a/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/lists.mock.ts +++ b/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/lists.mock.ts @@ -15,13 +15,13 @@ export const getTranslatedExceptionListMock = (): WrappedTranslatedExceptionList { entries: [ { - field: 'some.not.nested.field', + field: 'some.nested.field', operator: 'included', type: 'exact_cased', value: 'some value', }, ], - field: 'some.field', + field: 'some.parentField', type: 'nested', }, { diff --git a/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.mock.ts b/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.mock.ts index 183a819807ed2..d95627601a183 100644 --- a/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.mock.ts +++ b/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.mock.ts @@ -4,37 +4,53 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ArtifactConstants, buildArtifact } from '../../lib/artifacts'; +import { buildArtifact, maybeCompressArtifact, isCompressed } from '../../lib/artifacts'; import { getTranslatedExceptionListMock } from './lists.mock'; -import { InternalArtifactSchema, InternalManifestSchema } from './saved_objects'; +import { + InternalManifestSchema, + internalArtifactCompleteSchema, + InternalArtifactCompleteSchema, +} from './saved_objects'; + +const compressArtifact = async (artifact: InternalArtifactCompleteSchema) => { + const compressedArtifact = await maybeCompressArtifact(artifact); + if (!isCompressed(compressedArtifact)) { + throw new Error(`Unable to compress artifact: ${artifact.identifier}`); + } else if (!internalArtifactCompleteSchema.is(compressedArtifact)) { + throw new Error(`Incomplete artifact detected: ${artifact.identifier}`); + } + return compressedArtifact; +}; export const getInternalArtifactMock = async ( os: string, - schemaVersion: string -): Promise => { - return buildArtifact(getTranslatedExceptionListMock(), os, schemaVersion); + schemaVersion: string, + opts?: { compress: boolean } +): Promise => { + const artifact = await buildArtifact(getTranslatedExceptionListMock(), os, schemaVersion); + return opts?.compress ? compressArtifact(artifact) : artifact; }; -export const getInternalArtifactMockWithDiffs = async ( +export const getEmptyInternalArtifactMock = async ( os: string, - schemaVersion: string -): Promise => { - const mock = getTranslatedExceptionListMock(); - mock.entries.pop(); - return buildArtifact(mock, os, schemaVersion); + schemaVersion: string, + opts?: { compress: boolean } +): Promise => { + const artifact = await buildArtifact({ entries: [] }, os, schemaVersion); + return opts?.compress ? compressArtifact(artifact) : artifact; }; -export const getInternalArtifactsMock = async ( +export const getInternalArtifactMockWithDiffs = async ( os: string, - schemaVersion: string -): Promise => { - // @ts-ignore - return ArtifactConstants.SUPPORTED_OPERATING_SYSTEMS.map(async () => { - await buildArtifact(getTranslatedExceptionListMock(), os, schemaVersion); - }); + schemaVersion: string, + opts?: { compress: boolean } +): Promise => { + const mock = getTranslatedExceptionListMock(); + mock.entries.pop(); + const artifact = await buildArtifact(mock, os, schemaVersion); + return opts?.compress ? 
compressArtifact(artifact) : artifact; }; export const getInternalManifestMock = (): InternalManifestSchema => ({ - created: Date.now(), ids: [], }); diff --git a/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.ts b/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.ts index aa11f4409269a..4dea916dcb436 100644 --- a/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.ts +++ b/x-pack/plugins/security_solution/server/endpoint/schemas/artifacts/saved_objects.ts @@ -16,7 +16,7 @@ import { created } from './common'; export const body = t.string; // base64 -export const internalArtifactSchema = t.exact( +export const internalArtifactRecordSchema = t.exact( t.type({ identifier, compressionAlgorithm, @@ -25,18 +25,49 @@ export const internalArtifactSchema = t.exact( decodedSize: size, encodedSha256: sha256, encodedSize: size, - created, - body, }) ); +export type InternalArtifactRecordSchema = t.TypeOf; +export const internalArtifactAdditionalFields = { + body, +}; + +export const internalArtifactSchema = t.intersection([ + internalArtifactRecordSchema, + t.partial(internalArtifactAdditionalFields), +]); export type InternalArtifactSchema = t.TypeOf; +export const internalArtifactCompleteSchema = t.intersection([ + internalArtifactRecordSchema, + t.exact(t.type(internalArtifactAdditionalFields)), +]); +export type InternalArtifactCompleteSchema = t.TypeOf; + +export const internalArtifactCreateSchema = t.intersection([ + internalArtifactCompleteSchema, + t.exact( + t.type({ + created, + }) + ), +]); +export type InternalArtifactCreateSchema = t.TypeOf; + export const internalManifestSchema = t.exact( t.type({ - created, ids: t.array(identifier), }) ); - export type InternalManifestSchema = t.TypeOf; + +export const internalManifestCreateSchema = t.intersection([ + internalManifestSchema, + t.exact( + t.type({ + created, + }) + ), +]); +export type InternalManifestCreateSchema = t.TypeOf; diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.test.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.test.ts index 3e3b12c04d65c..0787231e242cb 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.test.ts @@ -5,7 +5,7 @@ */ import { savedObjectsClientMock } from 'src/core/server/mocks'; -import { ArtifactConstants } from '../../lib/artifacts'; +import { ArtifactConstants, getArtifactId } from '../../lib/artifacts'; import { getInternalArtifactMock } from '../../schemas/artifacts/saved_objects.mock'; import { getArtifactClientMock } from './artifact_client.mock'; import { ArtifactClient } from './artifact_client'; @@ -31,8 +31,11 @@ describe('artifact_client', () => { await artifactClient.createArtifact(artifact); expect(savedObjectsClient.create).toHaveBeenCalledWith( ArtifactConstants.SAVED_OBJECT_TYPE, - artifact, - { id: artifactClient.getArtifactId(artifact) } + { + ...artifact, + created: expect.any(Number), + }, + { id: getArtifactId(artifact) } ); }); diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.ts index ca53a891c4d6b..6138b4fb7e6dc 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.ts +++ 
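// Illustrative sketch, with assumptions: the shared field codecs (identifier, compressionAlgorithm,
// sha256, size, created) are defined elsewhere and are not shown in the hunks above, so primitive
// stand-ins are used here and some fields are omitted. The intent of the saved_objects.ts change is
// to layer the codecs so that `body` is optional on the base artifact type, required on the
// "complete" type, and `created` is only added when the saved object is written:

import * as t from 'io-ts';

const recordSchema = t.exact(
  t.type({
    identifier: t.string,
    compressionAlgorithm: t.keyof({ none: null, zlib: null }),
    decodedSha256: t.string,
    encodedSha256: t.string,
    decodedSize: t.number,
    encodedSize: t.number,
  })
);
const additionalFields = { body: t.string }; // base64-encoded artifact body

// Artifact metadata where the body does not have to be present.
const artifactSchema = t.intersection([recordSchema, t.partial(additionalFields)]);
// Artifact that is safe to push, cache and serve for download: the body is required.
const artifactCompleteSchema = t.intersection([recordSchema, t.exact(t.type(additionalFields))]);
// Payload written by ArtifactClient.createArtifact(): adds a server-side creation timestamp.
const artifactCreateSchema = t.intersection([
  artifactCompleteSchema,
  t.exact(t.type({ created: t.number })),
]);

export type ArtifactSketch = t.TypeOf<typeof artifactSchema>;
export type ArtifactCompleteSketch = t.TypeOf<typeof artifactCompleteSchema>;
export type ArtifactCreateSketch = t.TypeOf<typeof artifactCreateSchema>;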
b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/artifact_client.ts @@ -5,8 +5,11 @@ */ import { SavedObject, SavedObjectsClientContract } from 'src/core/server'; -import { ArtifactConstants } from '../../lib/artifacts'; -import { InternalArtifactSchema } from '../../schemas/artifacts'; +import { ArtifactConstants, getArtifactId } from '../../lib/artifacts'; +import { + InternalArtifactCompleteSchema, + InternalArtifactCreateSchema, +} from '../../schemas/artifacts'; export class ArtifactClient { private savedObjectsClient: SavedObjectsClientContract; @@ -15,24 +18,23 @@ export class ArtifactClient { this.savedObjectsClient = savedObjectsClient; } - public getArtifactId(artifact: InternalArtifactSchema) { - return `${artifact.identifier}-${artifact.decodedSha256}`; - } - - public async getArtifact(id: string): Promise> { - return this.savedObjectsClient.get( + public async getArtifact(id: string): Promise> { + return this.savedObjectsClient.get( ArtifactConstants.SAVED_OBJECT_TYPE, id ); } public async createArtifact( - artifact: InternalArtifactSchema - ): Promise> { - return this.savedObjectsClient.create( + artifact: InternalArtifactCompleteSchema + ): Promise> { + return this.savedObjectsClient.create( ArtifactConstants.SAVED_OBJECT_TYPE, - artifact, - { id: this.getArtifactId(artifact) } + { + ...artifact, + created: Date.now(), + }, + { id: getArtifactId(artifact) } ); } diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.test.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.test.ts index fe3f193bc8ff5..6db29289e983d 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.test.ts @@ -38,7 +38,10 @@ describe('manifest_client', () => { await manifestClient.createManifest(manifest); expect(savedObjectsClient.create).toHaveBeenCalledWith( ManifestConstants.SAVED_OBJECT_TYPE, - manifest, + { + ...manifest, + created: expect.any(Number), + }, { id: manifestClient.getManifestId() } ); }); diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.ts index 45182841e56fc..385f115e6301a 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.ts +++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_client.ts @@ -15,7 +15,7 @@ import { } from '../../../../common/endpoint/schema/common'; import { validate } from '../../../../common/validate'; import { ManifestConstants } from '../../lib/artifacts'; -import { InternalManifestSchema } from '../../schemas/artifacts'; +import { InternalManifestSchema, InternalManifestCreateSchema } from '../../schemas/artifacts'; interface UpdateManifestOpts { version: string; @@ -57,9 +57,12 @@ export class ManifestClient { public async createManifest( manifest: InternalManifestSchema ): Promise> { - return this.savedObjectsClient.create( + return this.savedObjectsClient.create( ManifestConstants.SAVED_OBJECT_TYPE, - manifest, + { + ...manifest, + created: Date.now(), + }, { id: this.getManifestId() } ); } diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.mock.ts 
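// Condensed usage sketch, assuming the minimal hand-written interfaces below (the real types live
// in the security_solution plugin and are not restated here). It walks the per-run flow that the
// refactored ManifestTask above drives through ManifestManager, and that the manifest_manager
// tests below exercise: compute, compress, persist, commit, dispatch, clean up.

type ManifestDiff = { id: string; type: 'add' | 'delete' };

interface ManifestLike {
  diff(old: ManifestLike): ManifestDiff[];
  compressArtifact(id: string): Promise<Error | null>;
  getArtifact(id: string): unknown;
}

interface ManifestManagerLike {
  getLastComputedManifest(schemaVersion: string): Promise<ManifestLike | null>;
  buildNewManifest(schemaVersion: string, baseline?: ManifestLike): Promise<ManifestLike>;
  pushArtifacts(artifacts: unknown[]): Promise<Error[]>;
  commit(manifest: ManifestLike): Promise<Error | null>;
  tryDispatch(manifest: ManifestLike): Promise<Error[]>;
  deleteArtifacts(ids: string[]): Promise<Error[]>;
}

export async function refreshManifestSketch(manager: ManifestManagerLike): Promise<void> {
  // Last manifest that was computed and saved; bail out if the system has not initialized yet.
  const oldManifest = await manager.getLastComputedManifest('v1');
  if (oldManifest == null) {
    return;
  }

  // Recompute from the current exception lists and diff against the previous state.
  const newManifest = await manager.buildNewManifest('v1', oldManifest);
  const diffs = newManifest.diff(oldManifest);
  const adds = diffs.filter((d) => d.type === 'add').map((d) => d.id);

  // Compress and persist only the artifacts that are new in this run.
  for (const id of adds) {
    const compressError = await newManifest.compressArtifact(id);
    if (compressError) {
      throw compressError;
    }
  }
  const pushErrors = await manager.pushArtifacts(adds.map((id) => newManifest.getArtifact(id)));
  if (pushErrors.length) {
    throw pushErrors[0];
  }

  // Commit the manifest saved object only when something actually changed.
  if (diffs.length) {
    const commitError = await manager.commit(newManifest);
    if (commitError) {
      throw commitError;
    }
  }

  // Write the manifest into package configs (a no-op for configs that are already up to date),
  // then remove the artifacts that this manifest superseded.
  const dispatchErrors = await manager.tryDispatch(newManifest);
  if (dispatchErrors.length) {
    throw dispatchErrors[0];
  }
  await manager.deleteArtifacts(diffs.filter((d) => d.type === 'delete').map((d) => d.id));
}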
b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.mock.ts index 3e4fee8871b8a..08cdb9816a1c1 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.mock.ts +++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.mock.ts @@ -6,53 +6,34 @@ import { savedObjectsClientMock, loggingSystemMock } from 'src/core/server/mocks'; import { Logger } from 'src/core/server'; -import { createPackageConfigMock } from '../../../../../../ingest_manager/common/mocks'; +import { + createPackageConfigWithManifestMock, + createPackageConfigWithInitialManifestMock, +} from '../../../../../../ingest_manager/common/mocks'; import { PackageConfigServiceInterface } from '../../../../../../ingest_manager/server'; import { createPackageConfigServiceMock } from '../../../../../../ingest_manager/server/mocks'; -import { getFoundExceptionListItemSchemaMock } from '../../../../../../lists/common/schemas/response/found_exception_list_item_schema.mock'; import { listMock } from '../../../../../../lists/server/mocks'; -import { - ExceptionsCache, - Manifest, - buildArtifact, - getFullEndpointExceptionList, -} from '../../../lib/artifacts'; -import { ManifestConstants } from '../../../lib/artifacts/common'; -import { InternalArtifactSchema } from '../../../schemas/artifacts'; +import { ExceptionsCache } from '../../../lib/artifacts'; import { getArtifactClientMock } from '../artifact_client.mock'; import { getManifestClientMock } from '../manifest_client.mock'; import { ManifestManager } from './manifest_manager'; +import { + getMockManifest, + getMockArtifactsWithDiff, + getEmptyMockArtifacts, +} from '../../../lib/artifacts/mocks'; -async function mockBuildExceptionListArtifacts( - os: string, - schemaVersion: string -): Promise { - const mockExceptionClient = listMock.getExceptionListClient(); - const first = getFoundExceptionListItemSchemaMock(); - mockExceptionClient.findExceptionListItem = jest.fn().mockReturnValueOnce(first); - const exceptions = await getFullEndpointExceptionList(mockExceptionClient, os, schemaVersion); - return [await buildArtifact(exceptions, os, schemaVersion)]; -} - -export class ManifestManagerMock extends ManifestManager { - protected buildExceptionListArtifacts = jest - .fn() - .mockResolvedValue(mockBuildExceptionListArtifacts('linux', 'v1')); - - public getLastDispatchedManifest = jest - .fn() - .mockResolvedValue(new Manifest(new Date(), 'v1', ManifestConstants.INITIAL_VERSION)); - - protected getManifestClient = jest - .fn() - .mockReturnValue(getManifestClientMock(this.savedObjectsClient)); +export enum ManifestManagerMockType { + InitialSystemState, + NormalFlow, } export const getManifestManagerMock = (opts?: { + mockType?: ManifestManagerMockType; cache?: ExceptionsCache; packageConfigService?: jest.Mocked; savedObjectsClient?: ReturnType; -}): ManifestManagerMock => { +}): ManifestManager => { let cache = new ExceptionsCache(5); if (opts?.cache !== undefined) { cache = opts.cache; @@ -64,7 +45,11 @@ export const getManifestManagerMock = (opts?: { } packageConfigService.list = jest.fn().mockResolvedValue({ total: 1, - items: [{ version: 'abcd', ...createPackageConfigMock() }], + items: [ + { version: 'policy-1-version', ...createPackageConfigWithManifestMock() }, + { version: 'policy-2-version', ...createPackageConfigWithInitialManifestMock() }, + { version: 'policy-3-version', ...createPackageConfigWithInitialManifestMock() 
}, + ], }); let savedObjectsClient = savedObjectsClientMock.create(); @@ -72,6 +57,32 @@ export const getManifestManagerMock = (opts?: { savedObjectsClient = opts.savedObjectsClient; } + class ManifestManagerMock extends ManifestManager { + protected buildExceptionListArtifacts = jest.fn().mockImplementation(() => { + const mockType = opts?.mockType ?? ManifestManagerMockType.NormalFlow; + switch (mockType) { + case ManifestManagerMockType.InitialSystemState: + return getEmptyMockArtifacts(); + case ManifestManagerMockType.NormalFlow: + return getMockArtifactsWithDiff(); + } + }); + + public getLastComputedManifest = jest.fn().mockImplementation(() => { + const mockType = opts?.mockType ?? ManifestManagerMockType.NormalFlow; + switch (mockType) { + case ManifestManagerMockType.InitialSystemState: + return null; + case ManifestManagerMockType.NormalFlow: + return getMockManifest({ compress: true }); + } + }); + + protected getManifestClient = jest + .fn() + .mockReturnValue(getManifestClientMock(this.savedObjectsClient)); + } + const manifestManager = new ManifestManagerMock({ artifactClient: getArtifactClientMock(savedObjectsClient), cache, diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.test.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.test.ts index 80d325ece765c..ff331f7d017f4 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.test.ts +++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.test.ts @@ -10,40 +10,71 @@ import { createPackageConfigServiceMock } from '../../../../../../ingest_manager import { ArtifactConstants, ManifestConstants, - Manifest, ExceptionsCache, + isCompleteArtifact, } from '../../../lib/artifacts'; import { getManifestManagerMock } from './manifest_manager.mock'; describe('manifest_manager', () => { describe('ManifestManager sanity checks', () => { - test('ManifestManager can snapshot manifest', async () => { + test('ManifestManager can retrieve and diff manifests', async () => { const manifestManager = getManifestManagerMock(); - const snapshot = await manifestManager.getSnapshot(); - expect(snapshot!.diffs).toEqual([ + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest! + ); + expect(newManifest.diff(oldManifest!)).toEqual([ { id: - 'endpoint-exceptionlist-linux-v1-1a8295e6ccb93022c6f5ceb8997b29f2912389b3b38f52a8f5a2ff7b0154b1bc', + 'endpoint-exceptionlist-linux-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + type: 'delete', + }, + { + id: + 'endpoint-exceptionlist-linux-v1-0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', type: 'add', }, ]); - expect(snapshot!.manifest).toBeInstanceOf(Manifest); }); test('ManifestManager populates cache properly', async () => { const cache = new ExceptionsCache(5); const manifestManager = getManifestManagerMock({ cache }); - const snapshot = await manifestManager.getSnapshot(); - expect(snapshot!.diffs).toEqual([ + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest! 
+ ); + const diffs = newManifest.diff(oldManifest!); + expect(diffs).toEqual([ { id: - 'endpoint-exceptionlist-linux-v1-1a8295e6ccb93022c6f5ceb8997b29f2912389b3b38f52a8f5a2ff7b0154b1bc', + 'endpoint-exceptionlist-linux-v1-96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + type: 'delete', + }, + { + id: + 'endpoint-exceptionlist-linux-v1-0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', type: 'add', }, ]); - await manifestManager.syncArtifacts(snapshot!, 'add'); - const diff = snapshot!.diffs[0]; - const entry = JSON.parse(inflateSync(cache.get(diff!.id)! as Buffer).toString()); + + const newArtifactId = diffs[1].id; + await newManifest.compressArtifact(newArtifactId); + const artifact = newManifest.getArtifact(newArtifactId)!; + + if (isCompleteArtifact(artifact)) { + await manifestManager.pushArtifacts([artifact]); // caches the artifact + } else { + throw new Error('Artifact is missing a body.'); + } + + const entry = JSON.parse(inflateSync(cache.get(newArtifactId)! as Buffer).toString()); expect(entry).toEqual({ entries: [ { @@ -52,7 +83,7 @@ describe('manifest_manager', () => { { entries: [ { - field: 'nested.field', + field: 'some.nested.field', operator: 'included', type: 'exact_cased', value: 'some value', @@ -73,28 +104,77 @@ describe('manifest_manager', () => { }); }); + test('ManifestManager cannot dispatch incomplete (uncompressed) artifact', async () => { + const packageConfigService = createPackageConfigServiceMock(); + const manifestManager = getManifestManagerMock({ packageConfigService }); + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest! + ); + const dispatchErrors = await manifestManager.tryDispatch(newManifest); + expect(dispatchErrors.length).toEqual(1); + expect(dispatchErrors[0].message).toEqual('Invalid manifest'); + }); + test('ManifestManager can dispatch manifest', async () => { const packageConfigService = createPackageConfigServiceMock(); const manifestManager = getManifestManagerMock({ packageConfigService }); - const snapshot = await manifestManager.getSnapshot(); - const dispatchErrors = await manifestManager.dispatch(snapshot!.manifest); + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest! + ); + const diffs = newManifest.diff(oldManifest!); + const newArtifactId = diffs[1].id; + await newManifest.compressArtifact(newArtifactId); + + const dispatchErrors = await manifestManager.tryDispatch(newManifest); + expect(dispatchErrors).toEqual([]); - const entries = snapshot!.manifest.getEntries(); - const artifact = Object.values(entries)[0].getArtifact(); + + // 2 policies updated... 
1 is already up-to-date + expect(packageConfigService.update.mock.calls.length).toEqual(2); + expect( packageConfigService.update.mock.calls[0][2].inputs[0].config!.artifact_manifest.value ).toEqual({ - manifest_version: ManifestConstants.INITIAL_VERSION, + manifest_version: '520f6cf88b3f36a065c6ca81058d5f8690aadadf6fe857f8dec4cc37589e7283', schema_version: 'v1', artifacts: { - [artifact.identifier]: { - compression_algorithm: 'none', + 'endpoint-exceptionlist-linux-v1': { + compression_algorithm: 'zlib', encryption_algorithm: 'none', - decoded_sha256: artifact.decodedSha256, - encoded_sha256: artifact.encodedSha256, - decoded_size: artifact.decodedSize, - encoded_size: artifact.encodedSize, - relative_url: `/api/endpoint/artifacts/download/${artifact.identifier}/${artifact.decodedSha256}`, + decoded_sha256: '0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', + encoded_sha256: '57941169bb2c5416f9bd7224776c8462cb9a2be0fe8b87e6213e77a1d29be824', + decoded_size: 292, + encoded_size: 131, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-linux-v1/0a5a2013a79f9e60682472284a1be45ab1ff68b9b43426d00d665016612c15c8', + }, + 'endpoint-exceptionlist-macos-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-macos-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + }, + 'endpoint-exceptionlist-windows-v1': { + compression_algorithm: 'zlib', + encryption_algorithm: 'none', + decoded_sha256: '96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', + encoded_sha256: '975382ab55d019cbab0bbac207a54e2a7d489fad6e8f6de34fc6402e5ef37b1e', + decoded_size: 432, + encoded_size: 147, + relative_url: + '/api/endpoint/artifacts/download/endpoint-exceptionlist-windows-v1/96b76a1a911662053a1562ac14c4ff1e87c2ff550d6fe52e1e0b3790526597d3', }, }, }); @@ -103,29 +183,20 @@ describe('manifest_manager', () => { test('ManifestManager fails to dispatch on conflict', async () => { const packageConfigService = createPackageConfigServiceMock(); const manifestManager = getManifestManagerMock({ packageConfigService }); - const snapshot = await manifestManager.getSnapshot(); - packageConfigService.update.mockRejectedValue({ status: 409 }); - const dispatchErrors = await manifestManager.dispatch(snapshot!.manifest); + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest! 
+ ); + const diffs = newManifest.diff(oldManifest!); + const newArtifactId = diffs[1].id; + await newManifest.compressArtifact(newArtifactId); + + packageConfigService.update.mockRejectedValueOnce({ status: 409 }); + const dispatchErrors = await manifestManager.tryDispatch(newManifest); expect(dispatchErrors).toEqual([{ status: 409 }]); - const entries = snapshot!.manifest.getEntries(); - const artifact = Object.values(entries)[0].getArtifact(); - expect( - packageConfigService.update.mock.calls[0][2].inputs[0].config!.artifact_manifest.value - ).toEqual({ - manifest_version: ManifestConstants.INITIAL_VERSION, - schema_version: 'v1', - artifacts: { - [artifact.identifier]: { - compression_algorithm: 'none', - encryption_algorithm: 'none', - decoded_sha256: artifact.decodedSha256, - encoded_sha256: artifact.encodedSha256, - decoded_size: artifact.decodedSize, - encoded_size: artifact.encodedSize, - relative_url: `/api/endpoint/artifacts/download/${artifact.identifier}/${artifact.decodedSha256}`, - }, - }, - }); }); test('ManifestManager can commit manifest', async () => { @@ -134,37 +205,43 @@ describe('manifest_manager', () => { savedObjectsClient, }); - const snapshot = await manifestManager.getSnapshot(); - await manifestManager.syncArtifacts(snapshot!, 'add'); - - const diff = { - id: 'abcd', - type: 'delete', - }; - snapshot!.diffs.push(diff); - - const dispatched = await manifestManager.dispatch(snapshot!.manifest); - expect(dispatched).toEqual([]); + const oldManifest = await manifestManager.getLastComputedManifest( + ManifestConstants.SCHEMA_VERSION + ); + const newManifest = await manifestManager.buildNewManifest( + ManifestConstants.SCHEMA_VERSION, + oldManifest! + ); + const diffs = newManifest.diff(oldManifest!); + const oldArtifactId = diffs[0].id; + const newArtifactId = diffs[1].id; + await newManifest.compressArtifact(newArtifactId); - await manifestManager.commit(snapshot!.manifest); + const artifact = newManifest.getArtifact(newArtifactId)!; + if (isCompleteArtifact(artifact)) { + await manifestManager.pushArtifacts([artifact]); + } else { + throw new Error('Artifact is missing a body.'); + } - await manifestManager.syncArtifacts(snapshot!, 'delete'); + await manifestManager.commit(newManifest); + await manifestManager.deleteArtifacts([oldArtifactId]); // created new artifact expect(savedObjectsClient.create.mock.calls[0][0]).toEqual( ArtifactConstants.SAVED_OBJECT_TYPE ); - // deleted old artifact - expect(savedObjectsClient.delete).toHaveBeenCalledWith( - ArtifactConstants.SAVED_OBJECT_TYPE, - 'abcd' - ); - // committed new manifest expect(savedObjectsClient.create.mock.calls[1][0]).toEqual( ManifestConstants.SAVED_OBJECT_TYPE ); + + // deleted old artifact + expect(savedObjectsClient.delete).toHaveBeenCalledWith( + ArtifactConstants.SAVED_OBJECT_TYPE, + oldArtifactId + ); }); }); }); diff --git a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts index c8cad32ab746e..2501f07cb26e0 100644 --- a/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts +++ b/x-pack/plugins/security_solution/server/endpoint/services/artifacts/manifest_manager/manifest_manager.ts @@ -5,10 +5,11 @@ */ import { Logger, SavedObjectsClientContract } from 'src/core/server'; -import { createHash } from 'crypto'; import { PackageConfigServiceInterface } from 
'../../../../../../ingest_manager/server'; import { ExceptionListClient } from '../../../../../../lists/server'; import { ManifestSchemaVersion } from '../../../../../common/endpoint/schema/common'; +import { manifestDispatchSchema } from '../../../../../common/endpoint/schema/manifest'; + import { ArtifactConstants, ManifestConstants, @@ -17,11 +18,14 @@ import { getFullEndpointExceptionList, ExceptionsCache, ManifestDiff, + getArtifactId, } from '../../../lib/artifacts'; -import { InternalArtifactSchema } from '../../../schemas/artifacts'; +import { + InternalArtifactCompleteSchema, + internalArtifactCompleteSchema, +} from '../../../schemas/artifacts'; import { ArtifactClient } from '../artifact_client'; import { ManifestClient } from '../manifest_client'; -import { compressExceptionList } from '../../../lib/artifacts/lists'; export interface ManifestManagerContext { savedObjectsClient: SavedObjectsClientContract; @@ -73,82 +77,86 @@ export class ManifestManager { * state of exception-list-agnostic SOs. * * @param schemaVersion The schema version of the artifact - * @returns {Promise} An array of uncompressed artifacts built from exception-list-agnostic SOs. + * @returns {Promise} An array of uncompressed artifacts built from exception-list-agnostic SOs. * @throws Throws/rejects if there are errors building the list. */ protected async buildExceptionListArtifacts( schemaVersion: string - ): Promise { - // TODO: should wrap in try/catch? - return ArtifactConstants.SUPPORTED_OPERATING_SYSTEMS.reduce( - async (acc: Promise, os) => { - const exceptionList = await getFullEndpointExceptionList( - this.exceptionListClient, - os, - schemaVersion - ); - const artifacts = await acc; - const artifact = await buildArtifact(exceptionList, os, schemaVersion); - artifacts.push(artifact); - return Promise.resolve(artifacts); - }, - Promise.resolve([]) - ); + ): Promise { + return ArtifactConstants.SUPPORTED_OPERATING_SYSTEMS.reduce< + Promise + >(async (acc, os) => { + const exceptionList = await getFullEndpointExceptionList( + this.exceptionListClient, + os, + schemaVersion + ); + const artifacts = await acc; + const artifact = await buildArtifact(exceptionList, os, schemaVersion); + return Promise.resolve([...artifacts, artifact]); + }, Promise.resolve([])); } /** - * Writes new artifact SOs based on provided snapshot. + * Writes new artifact SO. * - * @param snapshot A ManifestSnapshot to use for writing the artifacts. - * @returns {Promise} Any errors encountered. + * @param artifact An InternalArtifactCompleteSchema representing the artifact. + * @returns {Promise} An error, if encountered, or null. */ - private async writeArtifacts(snapshot: ManifestSnapshot): Promise { - const errors: Error[] = []; - for (const diff of snapshot.diffs) { - const artifact = snapshot.manifest.getArtifact(diff.id); - if (artifact === undefined) { - throw new Error( - `Corrupted manifest detected. 
Diff contained artifact ${diff.id} not in manifest.` - ); + protected async pushArtifact(artifact: InternalArtifactCompleteSchema): Promise { + const artifactId = getArtifactId(artifact); + try { + // Write the artifact SO + await this.artifactClient.createArtifact(artifact); + + // Cache the compressed body of the artifact + this.cache.set(artifactId, Buffer.from(artifact.body, 'base64')); + } catch (err) { + if (err.status === 409) { + this.logger.debug(`Tried to create artifact ${artifactId}, but it already exists.`); + } else { + return err; } + } - const compressedArtifact = await compressExceptionList(Buffer.from(artifact.body, 'base64')); - artifact.body = compressedArtifact.toString('base64'); - artifact.encodedSize = compressedArtifact.byteLength; - artifact.compressionAlgorithm = 'zlib'; - artifact.encodedSha256 = createHash('sha256').update(compressedArtifact).digest('hex'); + return null; + } - try { - // Write the artifact SO - await this.artifactClient.createArtifact(artifact); - // Cache the compressed body of the artifact - this.cache.set(diff.id, Buffer.from(artifact.body, 'base64')); - } catch (err) { - if (err.status === 409) { - this.logger.debug(`Tried to create artifact ${diff.id}, but it already exists.`); - } else { - // TODO: log error here? + /** + * Writes new artifact SOs. + * + * @param artifacts An InternalArtifactCompleteSchema array representing the artifacts. + * @returns {Promise} Any errors encountered. + */ + public async pushArtifacts(artifacts: InternalArtifactCompleteSchema[]): Promise { + const errors: Error[] = []; + for (const artifact of artifacts) { + if (internalArtifactCompleteSchema.is(artifact)) { + const err = await this.pushArtifact(artifact); + if (err) { errors.push(err); } + } else { + errors.push(new Error(`Incomplete artifact: ${getArtifactId(artifact)}`)); } } return errors; } /** - * Deletes old artifact SOs based on provided snapshot. + * Deletes outdated artifact SOs. + * + * The artifact may still remain in the cache. * - * @param snapshot A ManifestSnapshot to use for deleting the artifacts. + * @param artifactIds The IDs of the artifact to delete.. * @returns {Promise} Any errors encountered. */ - private async deleteArtifacts(snapshot: ManifestSnapshot): Promise { + public async deleteArtifacts(artifactIds: string[]): Promise { const errors: Error[] = []; - for (const diff of snapshot.diffs) { + for (const artifactId of artifactIds) { try { - // Delete the artifact SO - await this.artifactClient.deleteArtifact(diff.id); - // TODO: should we delete the cache entry here? - this.logger.info(`Cleaned up artifact ${diff.id}`); + await this.artifactClient.deleteArtifact(artifactId); + this.logger.info(`Cleaned up artifact ${artifactId}`); } catch (err) { errors.push(err); } @@ -157,14 +165,14 @@ export class ManifestManager { } /** - * Returns the last dispatched manifest based on the current state of the + * Returns the last computed manifest based on the state of the * user-artifact-manifest SO. * * @param schemaVersion The schema version of the manifest. - * @returns {Promise} The last dispatched manifest, or null if does not exist. + * @returns {Promise} The last computed manifest, or null if does not exist. * @throws Throws/rejects if there is an unexpected error retrieving the manifest. 
*/ - public async getLastDispatchedManifest(schemaVersion: string): Promise { + public async getLastComputedManifest(schemaVersion: string): Promise { try { const manifestClient = this.getManifestClient(schemaVersion); const manifestSo = await manifestClient.getManifest(); @@ -173,11 +181,7 @@ export class ManifestManager { throw new Error('No version returned for manifest.'); } - const manifest = new Manifest( - new Date(manifestSo.attributes.created), - schemaVersion, - manifestSo.version - ); + const manifest = new Manifest(schemaVersion, manifestSo.version); for (const id of manifestSo.attributes.ids) { const artifactSo = await this.artifactClient.getArtifact(id); @@ -193,89 +197,42 @@ export class ManifestManager { } /** - * Snapshots a manifest based on current state of exception-list-agnostic SOs. + * Builds a new manifest based on the current user exception list. * - * @param opts Optional parameters for snapshot retrieval. - * @param opts.initialize Initialize a new Manifest when no manifest SO can be retrieved. - * @returns {Promise} A snapshot of the manifest, or null if not initialized. + * @param schemaVersion The schema version of the manifest. + * @param baselineManifest A baseline manifest to use for initializing pre-existing artifacts. + * @returns {Promise} A new Manifest object reprenting the current exception list. */ - public async getSnapshot(opts?: ManifestSnapshotOpts): Promise { - try { - let oldManifest: Manifest | null; - - // Get the last-dispatched manifest - oldManifest = await this.getLastDispatchedManifest(ManifestConstants.SCHEMA_VERSION); - - if (oldManifest === null && opts !== undefined && opts.initialize) { - oldManifest = new Manifest( - new Date(), - ManifestConstants.SCHEMA_VERSION, - ManifestConstants.INITIAL_VERSION - ); // create empty manifest - } else if (oldManifest == null) { - this.logger.debug('Manifest does not exist yet. Waiting...'); - return null; - } - - // Build new exception list artifacts - const artifacts = await this.buildExceptionListArtifacts(ArtifactConstants.SCHEMA_VERSION); - - // Build new manifest - const newManifest = Manifest.fromArtifacts( - artifacts, - ManifestConstants.SCHEMA_VERSION, - oldManifest - ); - - // Get diffs - const diffs = newManifest.diff(oldManifest); + public async buildNewManifest( + schemaVersion: string, + baselineManifest?: Manifest + ): Promise { + // Build new exception list artifacts + const artifacts = await this.buildExceptionListArtifacts(ArtifactConstants.SCHEMA_VERSION); + + // Build new manifest + const manifest = Manifest.fromArtifacts( + artifacts, + ManifestConstants.SCHEMA_VERSION, + baselineManifest ?? Manifest.getDefault(schemaVersion) + ); - return { - manifest: newManifest, - diffs, - }; - } catch (err) { - this.logger.error(err); - return null; - } + return manifest; } /** - * Syncs artifacts based on provided snapshot. - * - * Creates artifacts that do not yet exist and cleans up old artifacts that have been - * superceded by this snapshot. + * Dispatches the manifest by writing it to the endpoint package config, if different + * from the manifest already in the config. * - * @param snapshot A ManifestSnapshot to use for sync. + * @param manifest The Manifest to dispatch. * @returns {Promise} Any errors encountered. 
*/ - public async syncArtifacts( - snapshot: ManifestSnapshot, - diffType: 'add' | 'delete' - ): Promise { - const filteredDiffs = snapshot.diffs.filter((diff) => { - return diff.type === diffType; - }); - - const tmpSnapshot = { ...snapshot }; - tmpSnapshot.diffs = filteredDiffs; - - if (diffType === 'add') { - return this.writeArtifacts(tmpSnapshot); - } else if (diffType === 'delete') { - return this.deleteArtifacts(tmpSnapshot); + public async tryDispatch(manifest: Manifest): Promise { + const serializedManifest = manifest.toEndpointFormat(); + if (!manifestDispatchSchema.is(serializedManifest)) { + return [new Error('Invalid manifest')]; } - return [new Error(`Unsupported diff type: ${diffType}`)]; - } - - /** - * Dispatches the manifest by writing it to the endpoint package config. - * - * @param manifest The Manifest to dispatch. - * @returns {Promise} Any errors encountered. - */ - public async dispatch(manifest: Manifest): Promise { let paging = true; let page = 1; const errors: Error[] = []; @@ -293,16 +250,25 @@ export class ManifestManager { const artifactManifest = newPackageConfig.inputs[0].config.artifact_manifest ?? { value: {}, }; - artifactManifest.value = manifest.toEndpointFormat(); - newPackageConfig.inputs[0].config.artifact_manifest = artifactManifest; - - try { - await this.packageConfigService.update(this.savedObjectsClient, id, newPackageConfig); - this.logger.debug( - `Updated package config ${id} with manifest version ${manifest.getVersion()}` - ); - } catch (err) { - errors.push(err); + + const oldManifest = + Manifest.fromPkgConfig(artifactManifest.value) ?? + Manifest.getDefault(ManifestConstants.SCHEMA_VERSION); + if (!manifest.equals(oldManifest)) { + newPackageConfig.inputs[0].config.artifact_manifest = { + value: serializedManifest, + }; + + try { + await this.packageConfigService.update(this.savedObjectsClient, id, newPackageConfig); + this.logger.debug( + `Updated package config ${id} with manifest version ${manifest.getSha256()}` + ); + } catch (err) { + errors.push(err); + } + } else { + this.logger.debug(`No change in package config: ${id}`); } } else { errors.push(new Error(`Package config ${id} has no config.`)); @@ -317,46 +283,32 @@ export class ManifestManager { } /** - * Commits a manifest to indicate that it has been dispatched. + * Commits a manifest to indicate that a new version has been computed. * * @param manifest The Manifest to commit. - * @returns {Promise} An error if encountered, or null if successful. + * @returns {Promise} An error, if encountered, or null. */ public async commit(manifest: Manifest): Promise { try { const manifestClient = this.getManifestClient(manifest.getSchemaVersion()); // Commit the new manifest - if (manifest.getVersion() === ManifestConstants.INITIAL_VERSION) { - await manifestClient.createManifest(manifest.toSavedObject()); + const manifestSo = manifest.toSavedObject(); + const version = manifest.getSoVersion(); + + if (version == null) { + await manifestClient.createManifest(manifestSo); } else { - const version = manifest.getVersion(); - if (version === ManifestConstants.INITIAL_VERSION) { - throw new Error('Updating existing manifest with baseline version. 
Bad state.'); - } - await manifestClient.updateManifest(manifest.toSavedObject(), { + await manifestClient.updateManifest(manifestSo, { version, }); } - this.logger.info(`Committed manifest ${manifest.getVersion()}`); + this.logger.info(`Committed manifest ${manifest.getSha256()}`); } catch (err) { return err; } return null; } - - /** - * Confirms that a packageConfig exists with provided name. - */ - public async confirmPackageConfigExists(name: string) { - // TODO: what if there are multiple results? uh oh. - const { total } = await this.packageConfigService.list(this.savedObjectsClient, { - page: 1, - perPage: 20, - kuery: `ingest-package-configs.name:${name}`, - }); - return total > 0; - } } From 466380e3b6e5541041d6479d28f9fdf336ff5a8b Mon Sep 17 00:00:00 2001 From: Spencer Date: Fri, 17 Jul 2020 13:53:54 -0700 Subject: [PATCH 3/9] [kbn/dev-utils] add RunWithCommands utility (#72311) Co-authored-by: spalger --- packages/kbn-dev-utils/src/index.ts | 2 +- packages/kbn-dev-utils/src/run/cleanup.ts | 94 +++++++++ packages/kbn-dev-utils/src/run/flags.test.ts | 18 +- packages/kbn-dev-utils/src/run/flags.ts | 79 ++++--- packages/kbn-dev-utils/src/run/help.test.ts | 199 ++++++++++++++++++ packages/kbn-dev-utils/src/run/help.ts | 150 +++++++++++++ packages/kbn-dev-utils/src/run/index.ts | 7 +- packages/kbn-dev-utils/src/run/run.ts | 119 ++++------- .../src/run/run_with_commands.test.ts | 77 +++++++ .../src/run/run_with_commands.ts | 136 ++++++++++++ 10 files changed, 743 insertions(+), 138 deletions(-) create mode 100644 packages/kbn-dev-utils/src/run/cleanup.ts create mode 100644 packages/kbn-dev-utils/src/run/help.test.ts create mode 100644 packages/kbn-dev-utils/src/run/help.ts create mode 100644 packages/kbn-dev-utils/src/run/run_with_commands.test.ts create mode 100644 packages/kbn-dev-utils/src/run/run_with_commands.ts diff --git a/packages/kbn-dev-utils/src/index.ts b/packages/kbn-dev-utils/src/index.ts index 3e9e6238df9dc..582526f939e42 100644 --- a/packages/kbn-dev-utils/src/index.ts +++ b/packages/kbn-dev-utils/src/index.ts @@ -33,9 +33,9 @@ export { KBN_P12_PATH, KBN_P12_PASSWORD, } from './certs'; -export { run, createFailError, createFlagError, combineErrors, isFailError, Flags } from './run'; export { REPO_ROOT } from './repo_root'; export { KbnClient } from './kbn_client'; +export * from './run'; export * from './axios'; export * from './stdio'; export * from './ci_stats_reporter'; diff --git a/packages/kbn-dev-utils/src/run/cleanup.ts b/packages/kbn-dev-utils/src/run/cleanup.ts new file mode 100644 index 0000000000000..84c3bbcb591d2 --- /dev/null +++ b/packages/kbn-dev-utils/src/run/cleanup.ts @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { inspect } from 'util'; + +import exitHook from 'exit-hook'; + +import { ToolingLog } from '../tooling_log'; +import { isFailError } from './fail'; + +export type CleanupTask = () => void; + +export class Cleanup { + static setup(log: ToolingLog, helpText: string) { + const onUnhandledRejection = (error: any) => { + log.error('UNHANDLED PROMISE REJECTION'); + log.error( + error instanceof Error + ? error + : new Error(`non-Error type rejection value: ${inspect(error)}`) + ); + process.exit(1); + }; + + process.on('unhandledRejection', onUnhandledRejection); + + const cleanup = new Cleanup(log, helpText, [ + () => process.removeListener('unhandledRejection', onUnhandledRejection), + ]); + + cleanup.add(exitHook(() => cleanup.execute())); + + return cleanup; + } + + constructor( + private readonly log: ToolingLog, + public helpText: string, + private readonly tasks: CleanupTask[] + ) {} + + add(task: CleanupTask) { + this.tasks.push(task); + } + + execute(topLevelError?: any) { + const tasks = this.tasks.slice(0); + this.tasks.length = 0; + + for (const task of tasks) { + try { + task(); + } catch (error) { + this.onError(error); + } + } + + if (topLevelError) { + this.onError(topLevelError); + } + } + + private onError(error: any) { + if (isFailError(error)) { + this.log.error(error.message); + + if (error.showHelp) { + this.log.write(this.helpText); + } + + process.exitCode = error.exitCode; + } else { + this.log.error('UNHANDLED ERROR'); + this.log.error(error); + process.exitCode = 1; + } + } +} diff --git a/packages/kbn-dev-utils/src/run/flags.test.ts b/packages/kbn-dev-utils/src/run/flags.test.ts index c730067a84f46..f6ff70b7abeb4 100644 --- a/packages/kbn-dev-utils/src/run/flags.test.ts +++ b/packages/kbn-dev-utils/src/run/flags.test.ts @@ -22,14 +22,12 @@ import { getFlags } from './flags'; it('gets flags correctly', () => { expect( getFlags(['-a', '--abc=bcd', '--foo=bar', '--no-bar', '--foo=baz', '--box', 'yes', '-zxy'], { - flags: { - boolean: ['x'], - string: ['abc'], - alias: { - x: 'extra', - }, - allowUnexpected: true, + boolean: ['x'], + string: ['abc'], + alias: { + x: 'extra', }, + allowUnexpected: true, }) ).toMatchInlineSnapshot(` Object { @@ -60,10 +58,8 @@ it('gets flags correctly', () => { it('guesses types for unexpected flags', () => { expect( getFlags(['-abc', '--abc=bcd', '--no-foo', '--bar'], { - flags: { - allowUnexpected: true, - guessTypesForUnexpectedFlags: true, - }, + allowUnexpected: true, + guessTypesForUnexpectedFlags: true, }) ).toMatchInlineSnapshot(` Object { diff --git a/packages/kbn-dev-utils/src/run/flags.ts b/packages/kbn-dev-utils/src/run/flags.ts index c809a40d8512b..12642bceca15a 100644 --- a/packages/kbn-dev-utils/src/run/flags.ts +++ b/packages/kbn-dev-utils/src/run/flags.ts @@ -17,12 +17,9 @@ * under the License. 
*/ -import { relative } from 'path'; - -import dedent from 'dedent'; import getopts from 'getopts'; -import { Options } from './run'; +import { RunOptions } from './run'; export interface Flags { verbose: boolean; @@ -36,23 +33,52 @@ export interface Flags { [key: string]: undefined | boolean | string | string[]; } -export function getFlags(argv: string[], options: Options): Flags { +export interface FlagOptions { + allowUnexpected?: boolean; + guessTypesForUnexpectedFlags?: boolean; + help?: string; + alias?: { [key: string]: string | string[] }; + boolean?: string[]; + string?: string[]; + default?: { [key: string]: any }; +} + +export function mergeFlagOptions(global: FlagOptions = {}, local: FlagOptions = {}): FlagOptions { + return { + alias: { + ...global.alias, + ...local.alias, + }, + boolean: [...(global.boolean || []), ...(local.boolean || [])], + string: [...(global.string || []), ...(local.string || [])], + default: { + ...global.alias, + ...local.alias, + }, + + help: local.help, + + allowUnexpected: !!(global.allowUnexpected || local.allowUnexpected), + guessTypesForUnexpectedFlags: !!(global.allowUnexpected || local.allowUnexpected), + }; +} + +export function getFlags(argv: string[], flagOptions: RunOptions['flags'] = {}): Flags { const unexpectedNames = new Set(); - const flagOpts = options.flags || {}; const { verbose, quiet, silent, debug, help, _, ...others } = getopts(argv, { - string: flagOpts.string, - boolean: [...(flagOpts.boolean || []), 'verbose', 'quiet', 'silent', 'debug', 'help'], + string: flagOptions.string, + boolean: [...(flagOptions.boolean || []), 'verbose', 'quiet', 'silent', 'debug', 'help'], alias: { - ...(flagOpts.alias || {}), + ...flagOptions.alias, v: 'verbose', }, - default: flagOpts.default, + default: flagOptions.default, unknown: (name: string) => { unexpectedNames.add(name); - return flagOpts.guessTypesForUnexpectedFlags; + return !!flagOptions.guessTypesForUnexpectedFlags; }, - } as any); + }); const unexpected: string[] = []; for (const unexpectedName of unexpectedNames) { @@ -119,32 +145,3 @@ export function getFlags(argv: string[], options: Options): Flags { ...others, }; } - -export function getHelp(options: Options) { - const usage = options.usage || `node ${relative(process.cwd(), process.argv[1])}`; - - const optionHelp = ( - dedent(options?.flags?.help || '') + - '\n' + - dedent` - --verbose, -v Log verbosely - --debug Log debug messages (less than verbose) - --quiet Only log errors - --silent Don't log anything - --help Show this message - ` - ) - .split('\n') - .filter(Boolean) - .join('\n '); - - return ` - ${usage} - - ${dedent(options.description || 'Runs a dev task') - .split('\n') - .join('\n ')} - - Options: - ${optionHelp + '\n\n'}`; -} diff --git a/packages/kbn-dev-utils/src/run/help.test.ts b/packages/kbn-dev-utils/src/run/help.test.ts new file mode 100644 index 0000000000000..27be7ad28b81a --- /dev/null +++ b/packages/kbn-dev-utils/src/run/help.test.ts @@ -0,0 +1,199 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { getCommandLevelHelp, getHelp, getHelpForAllCommands } from './help'; +import { Command } from './run_with_commands'; + +const fooCommand: Command = { + description: ` + Some thing that we wrote to help us execute things. + + Example: + + foo = bar = baz + + Are you getting it? + `, + name: 'foo', + run: () => {}, + flags: { + help: ` + --foo Some flag + --bar Another flag + Secondary info + --baz, -b Hey hello + `, + }, + usage: 'foo [...names]', +}; + +const barCommand: Command = { + description: ` + Some other thing that we wrote to help us execute things. + `, + name: 'bar', + run: () => {}, + flags: { + help: ` + --baz, -b Hey hello + `, + }, + usage: 'bar [...names]', +}; + +describe('getHelp()', () => { + it('returns the expected output', () => { + expect( + getHelp({ + description: fooCommand.description, + flagHelp: fooCommand.flags?.help, + usage: ` + node scripts/foo --bar --baz + `, + }) + ).toMatchInlineSnapshot(` + " + node scripts/foo --bar --baz + + Some thing that we wrote to help us execute things. + + Example: + + foo = bar = baz + + Are you getting it? + + Options: + --foo Some flag + --bar Another flag + Secondary info + --baz, -b Hey hello + --verbose, -v Log verbosely + --debug Log debug messages (less than verbose) + --quiet Only log errors + --silent Don't log anything + --help Show this message + + " + `); + }); +}); + +describe('getCommandLevelHelp()', () => { + it('returns the expected output', () => { + expect( + getCommandLevelHelp({ + command: fooCommand, + globalFlagHelp: ` + --global-flag some flag that applies to all commands + `, + }) + ).toMatchInlineSnapshot(` + " + node node_modules/jest-worker/build/workers/processChild.js foo [...names] + + Some thing that we wrote to help us execute things. + + Example: + + foo = bar = baz + + Are you getting it? + + Command-specific options: + --foo Some flag + --bar Another flag + Secondary info + --baz, -b Hey hello + + Global options: + --global-flag some flag that applies to all commands + --verbose, -v Log verbosely + --debug Log debug messages (less than verbose) + --quiet Only log errors + --silent Don't log anything + --help Show this message + + To see the help for other commands run: + node node_modules/jest-worker/build/workers/processChild.js help [command] + + To see the list of commands run: + node node_modules/jest-worker/build/workers/processChild.js --help + + " + `); + }); +}); + +describe('getHelpForAllCommands()', () => { + it('returns the expected output', () => { + expect( + getHelpForAllCommands({ + commands: [fooCommand, barCommand], + globalFlagHelp: ` + --global-flag some flag that applies to all commands + `, + usage: ` + node scripts/my_cli + `, + }) + ).toMatchInlineSnapshot(` + " + node scripts/my_cli [command] [...args] + + Runs a dev task + + Commands: + foo [...names] + Some thing that we wrote to help us execute things. + + Example: + + foo = bar = baz + + Are you getting it? + + Options: + --foo Some flag + --bar Another flag + Secondary info + --baz, -b Hey hello + + bar [...names] + Some other thing that we wrote to help us execute things. 
+ + Options: + --baz, -b Hey hello + + + Global options: + --global-flag some flag that applies to all commands + --verbose, -v Log verbosely + --debug Log debug messages (less than verbose) + --quiet Only log errors + --silent Don't log anything + --help Show this message + + To show the help information about a specific command run: + node scripts/my_cli help [command] + + " + `); + }); +}); diff --git a/packages/kbn-dev-utils/src/run/help.ts b/packages/kbn-dev-utils/src/run/help.ts new file mode 100644 index 0000000000000..351c01da5ebe2 --- /dev/null +++ b/packages/kbn-dev-utils/src/run/help.ts @@ -0,0 +1,150 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import Path from 'path'; + +import 'core-js/features/string/repeat'; +import dedent from 'dedent'; + +import { Command } from './run_with_commands'; + +const DEFAULT_GLOBAL_USAGE = `node ${Path.relative(process.cwd(), process.argv[1])}`; +export const GLOBAL_FLAGS = dedent` + --verbose, -v Log verbosely + --debug Log debug messages (less than verbose) + --quiet Only log errors + --silent Don't log anything + --help Show this message +`; + +export function indent(str: string, depth: number) { + const prefix = ' '.repeat(depth); + return str + .split('\n') + .map((line, i) => `${i > 0 ? 
`\n${prefix}` : ''}${line}`) + .join(''); +} + +export function joinAndTrimLines(...strings: Array) { + return strings.filter(Boolean).join('\n').split('\n').filter(Boolean).join(`\n`); +} + +export function getHelp({ + description, + usage, + flagHelp, +}: { + description?: string; + usage?: string; + flagHelp?: string; +}) { + const optionHelp = joinAndTrimLines(dedent(flagHelp || ''), GLOBAL_FLAGS); + + return ` + ${dedent(usage || '') || DEFAULT_GLOBAL_USAGE} + + ${indent(dedent(description || 'Runs a dev task'), 2)} + + Options: + ${indent(optionHelp, 4)}\n\n`; +} + +export function getCommandLevelHelp({ + usage, + globalFlagHelp, + command, +}: { + usage?: string; + globalFlagHelp?: string; + command: Command; +}) { + const globalUsage = dedent(usage || '') || DEFAULT_GLOBAL_USAGE; + const globalHelp = joinAndTrimLines(dedent(globalFlagHelp || ''), GLOBAL_FLAGS); + + const commandUsage = dedent(command.usage || '') || `${command.name} [...args]`; + const commandFlags = joinAndTrimLines(dedent(command.flags?.help || '')); + + return ` + ${globalUsage} ${commandUsage} + + ${indent(dedent(command.description || 'Runs a dev task'), 2)} + + Command-specific options: + ${indent(commandFlags, 4)} + + Global options: + ${indent(globalHelp, 4)} + + To see the help for other commands run: + ${globalUsage} help [command] + + To see the list of commands run: + ${globalUsage} --help\n\n`; +} + +export function getHelpForAllCommands({ + description, + usage, + globalFlagHelp, + commands, +}: { + description?: string; + usage?: string; + globalFlagHelp?: string; + commands: Array>; +}) { + const globalUsage = dedent(usage || '') || DEFAULT_GLOBAL_USAGE; + const globalHelp = joinAndTrimLines(dedent(globalFlagHelp || ''), GLOBAL_FLAGS); + + const commandsHelp = commands + .map((command) => { + const options = command.flags?.help + ? '\n' + + dedent` + Options: + ${indent( + joinAndTrimLines(dedent(command.flags?.help || '')), + ' '.length + )} + ` + + '\n' + : ''; + + return [ + dedent(command.usage || '') || command.name, + ` ${indent(dedent(command.description || 'Runs a dev task'), 2)}`, + ...([indent(options, 2)] || []), + ].join('\n'); + }) + .join('\n'); + + return ` + ${globalUsage} [command] [...args] + + ${indent(dedent(description || 'Runs a dev task'), 2)} + + Commands: + ${indent(commandsHelp, 4)} + + Global options: + ${indent(globalHelp, 4)} + + To show the help information about a specific command run: + ${globalUsage} help [command]\n\n`; +} diff --git a/packages/kbn-dev-utils/src/run/index.ts b/packages/kbn-dev-utils/src/run/index.ts index 5e1a42deefffb..070ce740bf202 100644 --- a/packages/kbn-dev-utils/src/run/index.ts +++ b/packages/kbn-dev-utils/src/run/index.ts @@ -17,6 +17,7 @@ * under the License. */ -export { run } from './run'; -export { Flags } from './flags'; -export { createFailError, createFlagError, combineErrors, isFailError } from './fail'; +export * from './run'; +export * from './run_with_commands'; +export * from './flags'; +export * from './fail'; diff --git a/packages/kbn-dev-utils/src/run/run.ts b/packages/kbn-dev-utils/src/run/run.ts index 029d428565163..2a844bcbc27eb 100644 --- a/packages/kbn-dev-utils/src/run/run.ts +++ b/packages/kbn-dev-utils/src/run/run.ts @@ -17,48 +17,37 @@ * under the License. 
*/ -import { inspect } from 'util'; - -// @ts-ignore @types are outdated and module is super simple -import exitHook from 'exit-hook'; - import { pickLevelFromFlags, ToolingLog, LogLevel } from '../tooling_log'; -import { createFlagError, isFailError } from './fail'; -import { Flags, getFlags, getHelp } from './flags'; +import { createFlagError } from './fail'; +import { Flags, getFlags, FlagOptions } from './flags'; import { ProcRunner, withProcRunner } from '../proc_runner'; +import { getHelp } from './help'; +import { CleanupTask, Cleanup } from './cleanup'; -type CleanupTask = () => void; -type RunFn = (args: { +export interface RunContext { log: ToolingLog; flags: Flags; procRunner: ProcRunner; addCleanupTask: (task: CleanupTask) => void; -}) => Promise | void; +} +export type RunFn = (context: RunContext) => Promise | void; -export interface Options { +export interface RunOptions { usage?: string; description?: string; log?: { defaultLevel?: LogLevel; }; - flags?: { - allowUnexpected?: boolean; - guessTypesForUnexpectedFlags?: boolean; - help?: string; - alias?: { [key: string]: string | string[] }; - boolean?: string[]; - string?: string[]; - default?: { [key: string]: any }; - }; + flags?: FlagOptions; } -export async function run(fn: RunFn, options: Options = {}) { - const flags = getFlags(process.argv.slice(2), options); - - if (flags.help) { - process.stderr.write(getHelp(options)); - process.exit(1); - } +export async function run(fn: RunFn, options: RunOptions = {}) { + const flags = getFlags(process.argv.slice(2), options.flags); + const helpText = getHelp({ + description: options.description, + usage: options.usage, + flagHelp: options.flags?.help, + }); const log = new ToolingLog({ level: pickLevelFromFlags(flags, { @@ -67,67 +56,33 @@ export async function run(fn: RunFn, options: Options = {}) { writeTo: process.stdout, }); - process.on('unhandledRejection', (error) => { - log.error('UNHANDLED PROMISE REJECTION'); - log.error( - error instanceof Error - ? 
error - : new Error(`non-Error type rejection value: ${inspect(error)}`) - ); - process.exit(1); - }); - - const handleErrorWithoutExit = (error: any) => { - if (isFailError(error)) { - log.error(error.message); - - if (error.showHelp) { - log.write(getHelp(options)); - } - - process.exitCode = error.exitCode; - } else { - log.error('UNHANDLED ERROR'); - log.error(error); - process.exitCode = 1; - } - }; - - const doCleanup = () => { - const tasks = cleanupTasks.slice(0); - cleanupTasks.length = 0; + if (flags.help) { + log.write(helpText); + process.exit(); + } - for (const task of tasks) { - try { - task(); - } catch (error) { - handleErrorWithoutExit(error); - } - } - }; + const cleanup = Cleanup.setup(log, helpText); - const unhookExit: CleanupTask = exitHook(doCleanup); - const cleanupTasks: CleanupTask[] = [unhookExit]; + if (!options.flags?.allowUnexpected && flags.unexpected.length) { + const error = createFlagError(`Unknown flag(s) "${flags.unexpected.join('", "')}"`); + cleanup.execute(error); + return; + } try { - if (!options.flags?.allowUnexpected && flags.unexpected.length) { - throw createFlagError(`Unknown flag(s) "${flags.unexpected.join('", "')}"`); - } - - try { - await withProcRunner(log, async (procRunner) => { - await fn({ - log, - flags, - procRunner, - addCleanupTask: (task: CleanupTask) => cleanupTasks.push(task), - }); + await withProcRunner(log, async (procRunner) => { + await fn({ + log, + flags, + procRunner, + addCleanupTask: cleanup.add.bind(cleanup), }); - } finally { - doCleanup(); - } + }); } catch (error) { - handleErrorWithoutExit(error); + cleanup.execute(error); + // process.exitCode is set by `cleanup` when necessary process.exit(); + } finally { + cleanup.execute(); } } diff --git a/packages/kbn-dev-utils/src/run/run_with_commands.test.ts b/packages/kbn-dev-utils/src/run/run_with_commands.test.ts new file mode 100644 index 0000000000000..eb7708998751c --- /dev/null +++ b/packages/kbn-dev-utils/src/run/run_with_commands.test.ts @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { RunWithCommands } from './run_with_commands'; +import { ToolingLog, ToolingLogCollectingWriter } from '../tooling_log'; +import { ProcRunner } from '../proc_runner'; + +const testLog = new ToolingLog(); +const testLogWriter = new ToolingLogCollectingWriter(); +testLog.setWriters([testLogWriter]); + +const testCli = new RunWithCommands({ + usage: 'node scripts/test_cli [...options]', + description: 'test cli', + extendContext: async () => { + return { + extraContext: true, + }; + }, + globalFlags: { + boolean: ['some-bool'], + help: ` + --some-bool description + `, + }, +}); + +beforeEach(() => { + process.argv = ['node', 'scripts/test_cli', 'foo', '--some-bool']; + jest.clearAllMocks(); +}); + +it('extends the context using extendContext()', async () => { + const context: any = await new Promise((resolve) => { + testCli.command({ name: 'foo', description: 'some command', run: resolve }).execute(); + }); + + expect(context).toEqual({ + log: expect.any(ToolingLog), + flags: expect.any(Object), + addCleanupTask: expect.any(Function), + procRunner: expect.any(ProcRunner), + extraContext: true, + }); + + expect(context.flags).toMatchInlineSnapshot(` + Object { + "_": Array [ + "foo", + ], + "debug": false, + "help": false, + "quiet": false, + "silent": false, + "some-bool": true, + "unexpected": Array [], + "v": false, + "verbose": false, + } + `); +}); diff --git a/packages/kbn-dev-utils/src/run/run_with_commands.ts b/packages/kbn-dev-utils/src/run/run_with_commands.ts new file mode 100644 index 0000000000000..9fb069e4b2d35 --- /dev/null +++ b/packages/kbn-dev-utils/src/run/run_with_commands.ts @@ -0,0 +1,136 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { ToolingLog, pickLevelFromFlags } from '../tooling_log'; +import { RunContext, RunOptions } from './run'; +import { getFlags, FlagOptions, mergeFlagOptions } from './flags'; +import { Cleanup } from './cleanup'; +import { getHelpForAllCommands, getCommandLevelHelp } from './help'; +import { createFlagError } from './fail'; +import { withProcRunner } from '../proc_runner'; + +export type CommandRunFn = (context: RunContext & T) => Promise | void; + +export interface Command { + name: string; + run: CommandRunFn; + description: RunOptions['description']; + usage?: RunOptions['usage']; + flags?: FlagOptions; +} + +export interface RunWithCommandsOptions { + log?: RunOptions['log']; + description?: RunOptions['description']; + usage?: RunOptions['usage']; + globalFlags?: FlagOptions; + extendContext?(context: RunContext): Promise | T; +} + +export class RunWithCommands { + constructor( + private readonly options: RunWithCommandsOptions, + private readonly commands: Array> = [] + ) {} + + command(options: Command) { + return new RunWithCommands(this.options, this.commands.concat(options)); + } + + async execute() { + const globalFlags = getFlags(process.argv.slice(2), { + allowUnexpected: true, + }); + + const isHelpCommand = globalFlags._[0] === 'help'; + const commandName = isHelpCommand ? globalFlags._[1] : globalFlags._[0]; + const command = this.commands.find((c) => c.name === commandName); + const log = new ToolingLog({ + level: pickLevelFromFlags(globalFlags, { + default: this.options.log?.defaultLevel, + }), + writeTo: process.stdout, + }); + + const globalHelp = getHelpForAllCommands({ + description: this.options.description, + usage: this.options.usage, + globalFlagHelp: this.options.globalFlags?.help, + commands: this.commands, + }); + const cleanup = Cleanup.setup(log, globalHelp); + + if (!command) { + if (globalFlags.help) { + log.write(globalHelp); + process.exit(); + } + + const error = createFlagError( + commandName ? `unknown command [${commandName}]` : `missing command name` + ); + cleanup.execute(error); + process.exit(1); + } + + const commandFlagOptions = mergeFlagOptions(this.options.globalFlags, command.flags); + const commandFlags = getFlags(process.argv.slice(2), commandFlagOptions); + const commandHelp = getCommandLevelHelp({ + usage: this.options.usage, + globalFlagHelp: this.options.globalFlags?.help, + command, + }); + cleanup.helpText = commandHelp; + + if (commandFlags.help || isHelpCommand) { + cleanup.execute(); + log.write(commandHelp); + process.exit(); + } + + if (!commandFlagOptions.allowUnexpected && commandFlags.unexpected.length) { + cleanup.execute(createFlagError(`Unknown flag(s) "${commandFlags.unexpected.join('", "')}"`)); + return; + } + + try { + await withProcRunner(log, async (procRunner) => { + const context: RunContext = { + log, + flags: commandFlags, + procRunner, + addCleanupTask: cleanup.add, + }; + + const extendedContext = { + ...context, + ...(this.options.extendContext ? await this.options.extendContext(context) : ({} as T)), + }; + + await command.run(extendedContext); + }); + } catch (error) { + cleanup.execute(error); + // exitCode is set by `cleanup` when necessary + process.exit(); + } finally { + cleanup.execute(); + } + } +} From f487867fda2f3c5386d28525dd72958af5fbc81a Mon Sep 17 00:00:00 2001 From: "Devin W. 
Hurley" Date: Fri, 17 Jul 2020 17:13:34 -0400 Subject: [PATCH 4/9] [SIEM] Updates consumer in export_rule archive (#72324) --- .../es_archives/export_rule/data.json.gz | Bin 1911 -> 1931 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/x-pack/test/security_solution_cypress/es_archives/export_rule/data.json.gz b/x-pack/test/security_solution_cypress/es_archives/export_rule/data.json.gz index 3c1a0f663a8eeb58dbe0626b067a086f52f975ef..373251d9e4f93d8097029345aa45aba959dd7a08 100644 GIT binary patch literal 1931 zcmV;62Xy!!iwFpN;}Kr~17u-zVJ>QOZ*Bn9Sy@x#Iu?HCS9tPtmv&;ud#3IKTLYIR z&ZcOR0rP zQ2QVk9uO6B(g59I2*>4As>POSO?7>VDL8Jp<)Rq+A^0 zu@tQjiH~B`)29@ch%9*7K`FGC7UWXENfI+y{MT&p-yceJDbfYoPjdSh!J|4O{TNYJ zE3>8K6hBFhqlA3Ai*BMXp9W(Z96u+2?j~VpzkhL^-UiX({gZwijjfxZ?uQpAV*Gr0 zeE1wb+~2y*L(R&_ooziLqEQx~efcax_bG0CaW9W{@BGUP-S{*5tdYUrr{ntuxec$` z?a@u=V(i_thkPW?+C$6KlcV-t=YSfY2JH(keu%WaTVdd!VeefXSXbo!D;1$(+%-}& zW}hvy)4#g!95wE*j!i+2u59h}^8Sf_y=GQ)eR#s#G+mA8Mq^p=B#z|1-2mnhl znvf+{6SI0d@qdZ*?$7zyrB@+~vw@y)pMy!%b5+A__NmkKW(F!cS zAT%U#e~%`?usf-VSWsGF3NZ^ZlFH0nZG!}09N?Uboe{>@?wbQ~nJ~i117_=43FSC@ zg(pi!(zG~vR1I5WRlCRtaF(%Ij6kVk%i8ng8mWJ%JVP;?6y+Zkr8&GI;XdSqxv#fFjq$1JB-2_rP_ z6pf{&=$cj~VvgprtrP`)G;eCr3uOuRP@48cCaa+~;5={?q zaj#`K!Wo8gU7wK|VZ%<4WYRE8TTVI$PoehK>wzkp^m^UbT%-j})oq%FYT5=?y(WD1 zP1{6n;5k8P))r(hz=!q&&N31%#fYH?=s@985(YVlf!v%XkF}-5O?==3**Vv*|4QoDrHrCvw|XriNZovS=4T13^8=_K%Vu{b~(n>0Orwp>cR8Vd$E6co>u0<;w9 zSTImXwbrP%RyOP+Hk?G*v`p6vu%-sK>#DkrVdAj7rm7hzLP6x%x)znYK1coeB&_v% z`R0(~JsJKU30rSZoRX4R?`-9r^Tn6)=>!2z__EN{L>exhnLBf6)JCMAAz9>~XMmSy zqGC*{Id!kDElMw~$eDSw_)gT!mJ7>zXvO(&*IftB|D65%j{N_=v-559(Q$SdsuT`G z74_ppI^Tk$Ax!|@o?PB$YFweXidk_BZI&sR;~ z)l}aO4K;Lh13AdThHqBX7nV-_sV!E-9Fp56^{QmpIL#r{j`of;hY33({gz$V>Y9pZ zIzak1Nj*_h&Eb4Dy%2Jwa827OCmXo*G08@VxA?Q%;kvv3)~+(!@ttj@(6cpw3`RWM zs!k(sf^NN=xq7L|uG?F9ITbHyoU8QifG`FA_y8+~7fLSmk4k$WEWaFmgf=2F{Q5=b zpa5jS;3=vT%eIGI&VsYyQpIr}<_lFGnQCzdhw{Ld<25%qj^7lp*C2n@vTdFWR-VvO z@vdZ<%ph-g>zIj`l*4${L)CeGN$6hpaO3{vyz`hY`j;#F*H36%o~Ajv=QSxre^A8BCHKFKxU|FmnGyFaq#^D9DB_xK&wYMS z$i2Gfn!iBE%Xa+W`4+tA0>)e0vrPS08gD0t&1J>*qp2_Ga6f;y=t9g7#AMD|Jt+54 zx;P%o`}s#iC|p|dUy4?f&oN_@clISbXGt>LL&5L>EqSWWIXWT$$k>XXqncXXGo8wl zOiAcv>l%|QIK*L@m0^?xgWZVZY&8_Xr)zarTmHm1i_MB1a&qC_YV)_b5)?acdLxiq RS$ef{{sZ~)cm47m007H2!|VV6 literal 1911 zcmV--2Z;C|iwFP!000026V+K+bK5u)e)q57>S>Np0>xWf^WZDC*LP&cj<=>%APGva zAd(?KpOyc<4T@4oiL_-osmZGB2cZvie|?|;b%lg|2cfP?a=Gsl3(v#^-mt$tJATV zWJjlC)6n9*)2;pv(?5+)Pu%D(L|fOHPW+Cwb-rVs)7!5s3v~UalNb^IY#ROH`E7r% zb9=sTWNh!;LWgI!kL>FuH^a-b%i&*tEqi%-ACXkLN*LyX`ehCuu*@b?GADkPQt{Z2 zStdcQ@TCL`5^Tz7w7RH^RAj4~d7+`6VfHk$?V83-jlkAp!^R}S9wRbFBF#vNFv$ej zWDE!)#B7lg97J5;$cOMY=RkxfdFt*yBylQ~G-IT^b%Iko;gz(KSY=`qrxl$Qt-#`C zlm#>zZn4-OUrg&G${4FK1%&%4O>zJmZIA#w_$24zN0f^f_sxMgi#Zkaj_{XRDH9}p zg(rZF?3-phQ(j4A^3J-EZT_TYv z7(&FB3E&K=7h&;`FRv!pACrbn?r8)<1+5VphNEGIhk5plGm<`5v0>CF5znbr!UR9` zizd@kG=!=|BJe`Cm7<_c7ELYsqHJ9QoFs!RmDSK136b#`fCo5v<$3i%cAQkRoq)C z2}w$zTwl(}l=5-kk5g%wiRwwm;3?GJ%X*;7rhQ-Ykb{xmRh_P(tA?c$)$PJp&#(;a z_^#~-Mr%p-1bjHXBWX&5T8ucpgANqbk_gB_4CLl4zHikMSMfoNWaoUDg-!^g5vjLi zM!ledktPOcqLbt$S4CX3h^i;Eso?8!aey*DZF<^9P3pC{KO#ZK$YSHA5p*mVE~Hv> z(t1%gxWIfojj&-Fj_VVo`j+FUnnqyyFx{?-bR1$ov@H#V<*qOAa4`v6gF(JIr1(IF ze?WuY+Y_gxWZpY_dFO1^o=yI7Ua$(s3H=O@g+p*#N&)L6i$p3E}8_%nck@Le)WpEg(co@af`5qh% zNeu7~9`wYCM;b(D1m~tq)g}2J?%xdN6yWhnL z#KoY%tOlF!JTu%EiSyZL^*vuiM^G~Gz~3=R3%fKe2t3>J)h>2D)$kmodRCyTfvxG- z#wO7{qoTgBbmmWMxgr*jTsNtkl40dEhfv$-9cd2Jc1VXktBu-7#Vi?NZJnf^s;TC1 zKAWCoa-?t!%PuDyxb!hjCs_2vlicC*V*9OKWxnG(+e)ElZ2&opc%)aIM&1P7%WmfC zr6#Lwt>NWN{JF8xFGiFr@Fzw@DZEf}see`41LgVU=p(d|EG4gBbao0rmJFVuI<;(l 
z*ySuZA1+lA4Pm}e<&mit_i-o>V%lzZmE-tL0ecPdS1nuT`JnNHmWnqe%X9{L!@I{^ zyrdk)s{yXg>ouW!-NTjpm;KITy69hy>|Z~jak&WDn(KB=!#0rXYX5ALS`AelfQ13z zJ*Z|>g)3qT58!5jXV;|1Pn2-Lg9KguQ6j)GB`^q(3 zIR~0CuzpuIlZ25g;vG`aF?GAGTgq>T`xnPY9~4H%MA;+$IR0JPh9sPjj-x{)4GrmS zO|z6fRBB2Kv7b<@Gc@M@mLQ%lEkrbnMCn5DZMB#fy)HCx!+b|f;GzbX{XEwO8HZR5 zN{<_u^}3En^icod?e3L*7n)g;6tA2MFL%AtLqPcfjYgRu{50yrgi2)fKppF@X_Y?Z z$sx)n9!XEbZ7^vpV~Q$+UvZ194}jBlE&XQw`^PIdtegmCJAO(c*rxC4)-RF>BdF@v zBCffXQI5FtgCbroxqmX^(hloqM%*NUUJpQaVntB-W?-< xLDFU@fKNkh2h~6E&12KBLryNdYc_wKD?zdIsy70;m8n%L=YRStDGUA`001%rx*-4n From f0d3cb96a4bc453150b74ecef923e5423571c647 Mon Sep 17 00:00:00 2001 From: Chris Roberson Date: Fri, 17 Jul 2020 17:15:25 -0400 Subject: [PATCH 5/9] [Monitoring] Fix issue with ES node detail status (#72298) * Fix issue with ES node detail status * Add test --- .../monitoring/public/alerts/status.tsx | 4 ++ .../components/elasticsearch/node/advanced.js | 4 +- .../elasticsearch/node_detail_status/index.js | 9 ++- .../public/directives/main/index.html | 1 + .../elasticsearch/node/advanced/index.js | 17 +++++- .../monitoring/elasticsearch/node_detail.js | 59 +++++++++++++++++++ .../monitoring/elasticsearch_node_detail.js | 4 ++ 7 files changed, 92 insertions(+), 6 deletions(-) diff --git a/x-pack/plugins/monitoring/public/alerts/status.tsx b/x-pack/plugins/monitoring/public/alerts/status.tsx index d15dcc9974863..9c262884d7257 100644 --- a/x-pack/plugins/monitoring/public/alerts/status.tsx +++ b/x-pack/plugins/monitoring/public/alerts/status.tsx @@ -20,6 +20,10 @@ interface Props { export const AlertsStatus: React.FC = (props: Props) => { const { alerts, showBadge = false, showOnlyCount = false } = props; + if (!alerts) { + return null; + } + let atLeastOneDanger = false; const count = Object.values(alerts).reduce((cnt, alertStatus) => { if (alertStatus.states.length) { diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/node/advanced.js b/x-pack/plugins/monitoring/public/components/elasticsearch/node/advanced.js index 6fea34ed9c901..b2a17515bbb96 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/node/advanced.js +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/node/advanced.js @@ -19,7 +19,7 @@ import { NodeDetailStatus } from '../node_detail_status'; import { MonitoringTimeseriesContainer } from '../../chart'; import { FormattedMessage } from '@kbn/i18n/react'; -export const AdvancedNode = ({ nodeSummary, metrics, ...props }) => { +export const AdvancedNode = ({ nodeSummary, metrics, alerts, ...props }) => { const metricsToShow = [ metrics.node_gc, metrics.node_gc_time, @@ -50,7 +50,7 @@ export const AdvancedNode = ({ nodeSummary, metrics, ...props }) => { - + diff --git a/x-pack/plugins/monitoring/public/components/elasticsearch/node_detail_status/index.js b/x-pack/plugins/monitoring/public/components/elasticsearch/node_detail_status/index.js index 18533b3bd4b5e..85b4d0daddade 100644 --- a/x-pack/plugins/monitoring/public/components/elasticsearch/node_detail_status/index.js +++ b/x-pack/plugins/monitoring/public/components/elasticsearch/node_detail_status/index.js @@ -9,8 +9,9 @@ import { SummaryStatus } from '../../summary_status'; import { NodeStatusIcon } from '../node'; import { formatMetric } from '../../../lib/format_number'; import { i18n } from '@kbn/i18n'; +import { AlertsStatus } from '../../../alerts/status'; -export function NodeDetailStatus({ stats, alerts }) { +export function NodeDetailStatus({ stats, alerts = {} }) { const { transport_address: transportAddress, usedHeap, 
@@ -29,8 +30,10 @@ export function NodeDetailStatus({ stats, alerts }) { const metrics = [ { - label: 'Alerts', - value: {Object.values(alerts).length}, + label: i18n.translate('xpack.monitoring.elasticsearch.nodeDetailStatus.alerts', { + defaultMessage: 'Alerts', + }), + value: , }, { label: i18n.translate('xpack.monitoring.elasticsearch.nodeDetailStatus.transportAddress', { diff --git a/x-pack/plugins/monitoring/public/directives/main/index.html b/x-pack/plugins/monitoring/public/directives/main/index.html index 39d357813b3f2..fabd207d72b1f 100644 --- a/x-pack/plugins/monitoring/public/directives/main/index.html +++ b/x-pack/plugins/monitoring/public/directives/main/index.html @@ -90,6 +90,7 @@ { + describe('Active Nodes', () => { + const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects); + + before(async () => { + await setup('monitoring/singlecluster-three-nodes-shard-relocation', { + from: 'Oct 5, 2017 @ 20:31:48.354', + to: 'Oct 5, 2017 @ 20:35:12.176', + }); + + // go to nodes listing + await overview.clickEsNodes(); + expect(await nodesList.isOnListing()).to.be(true); + }); + + after(async () => { + await tearDown(); + }); + + afterEach(async () => { + await PageObjects.monitoring.clickBreadcrumb('~breadcrumbEsNodes'); // return back for next test + }); + + it('should show node summary of master node with 20 indices and 38 shards', async () => { + await nodesList.clickRowByResolver('jUT5KdxfRbORSCWkb5zjmA'); + await nodeDetail.clickAdvanced(); + + expect(await nodeDetail.getSummary()).to.eql({ + transportAddress: 'Transport Address\n127.0.0.1:9300', + jvmHeap: 'JVM Heap\n29%', + freeDiskSpace: 'Free Disk Space\n173.9 GB (37.42%)', + documentCount: 'Documents\n24.8k', + dataSize: 'Data\n50.4 MB', + indicesCount: 'Indices\n20', + shardsCount: 'Shards\n38', + nodeType: 'Type\nMaster Node', + status: 'Status: Online', + }); + }); + + it('should show node summary of data node with 4 indices and 4 shards', async () => { + await nodesList.clickRowByResolver('bwQWH-7IQY-mFPpfoaoFXQ'); + await nodeDetail.clickAdvanced(); + + expect(await nodeDetail.getSummary()).to.eql({ + transportAddress: 'Transport Address\n127.0.0.1:9302', + jvmHeap: 'JVM Heap\n17%', + freeDiskSpace: 'Free Disk Space\n173.9 GB (37.42%)', + documentCount: 'Documents\n240', + dataSize: 'Data\n1.4 MB', + indicesCount: 'Indices\n4', + shardsCount: 'Shards\n4', + nodeType: 'Type\nNode', + status: 'Status: Online', + }); + }); + }); + }); }); } diff --git a/x-pack/test/functional/services/monitoring/elasticsearch_node_detail.js b/x-pack/test/functional/services/monitoring/elasticsearch_node_detail.js index 2cfa7628c0c4b..41b69403829f7 100644 --- a/x-pack/test/functional/services/monitoring/elasticsearch_node_detail.js +++ b/x-pack/test/functional/services/monitoring/elasticsearch_node_detail.js @@ -19,6 +19,10 @@ export function MonitoringElasticsearchNodeDetailProvider({ getService }) { const SUBJ_SUMMARY_STATUS = `${SUBJ_SUMMARY} > statusIcon`; return new (class ElasticsearchNodeDetail { + async clickAdvanced() { + return testSubjects.click('esNodeDetailAdvancedLink'); + } + async getSummary() { return { transportAddress: await testSubjects.getVisibleText(SUBJ_SUMMARY_TRANSPORT_ADDRESS), From 7519c1f8c33ffea28b9e8daacdbebe05080125f3 Mon Sep 17 00:00:00 2001 From: Lee Drengenberg Date: Fri, 17 Jul 2020 16:42:56 -0500 Subject: [PATCH 6/9] use WORKSPACE env var for stack_functional_integration tests, fix navigate path (#71908) Co-authored-by: Elastic Machine --- 
.../configs/config.stack_functional_integration_base.js | 8 ++++++-- .../test/functional/apps/sample_data/e_commerce.js | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/x-pack/test/stack_functional_integration/configs/config.stack_functional_integration_base.js b/x-pack/test/stack_functional_integration/configs/config.stack_functional_integration_base.js index a34d158496ba0..96d338a04b01b 100644 --- a/x-pack/test/stack_functional_integration/configs/config.stack_functional_integration_base.js +++ b/x-pack/test/stack_functional_integration/configs/config.stack_functional_integration_base.js @@ -12,12 +12,16 @@ import { esTestConfig, kbnTestConfig } from '@kbn/test'; const reportName = 'Stack Functional Integration Tests'; const testsFolder = '../test/functional/apps'; -const stateFilePath = '../../../../../integration-test/qa/envvars.sh'; -const prepend = (testFile) => require.resolve(`${testsFolder}/${testFile}`); const log = new ToolingLog({ level: 'info', writeTo: process.stdout, }); +log.info(`WORKSPACE in config file ${process.env.WORKSPACE}`); +const stateFilePath = process.env.WORKSPACE + ? `${process.env.WORKSPACE}/qa/envvars.sh` + : '../../../../../integration-test/qa/envvars.sh'; + +const prepend = (testFile) => require.resolve(`${testsFolder}/${testFile}`); export default async ({ readConfigFile }) => { const defaultConfigs = await readConfigFile(require.resolve('../../functional/config')); diff --git a/x-pack/test/stack_functional_integration/test/functional/apps/sample_data/e_commerce.js b/x-pack/test/stack_functional_integration/test/functional/apps/sample_data/e_commerce.js index 306f30133f6ee..0286f6984e89e 100644 --- a/x-pack/test/stack_functional_integration/test/functional/apps/sample_data/e_commerce.js +++ b/x-pack/test/stack_functional_integration/test/functional/apps/sample_data/e_commerce.js @@ -12,7 +12,7 @@ export default function ({ getService, getPageObjects }) { before(async () => { await browser.setWindowSize(1200, 800); - await PageObjects.common.navigateToUrl('home', '/home/tutorial_directory/sampleData', { + await PageObjects.common.navigateToUrl('home', '/tutorial_directory/sampleData', { useActualUrl: true, insertTimestamp: false, }); From 7aa600bff7250655dbbb80b026c67eed46f2969c Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 17 Jul 2020 14:53:04 -0700 Subject: [PATCH 7/9] [DOCS] Removes occurrences of X-Pack Security and Reporting (#72302) --- docs/dev-tools/grokdebugger/index.asciidoc | 2 +- docs/settings/monitoring-settings.asciidoc | 2 +- docs/setup/install.asciidoc | 4 ++-- .../monitoring/monitoring-kibana.asciidoc | 15 +++++++------ docs/user/reporting/chromium-sandbox.asciidoc | 12 +++++----- .../reporting/configuring-reporting.asciidoc | 10 ++++----- .../user/reporting/development/index.asciidoc | 10 +++++---- docs/user/reporting/gs-index.asciidoc | 2 +- docs/user/reporting/index.asciidoc | 2 +- docs/user/reporting/script-example.asciidoc | 3 ++- docs/user/reporting/watch-example.asciidoc | 2 +- docs/user/security/reporting.asciidoc | 16 +++++++------- docs/user/security/securing-kibana.asciidoc | 22 +++++++++---------- 13 files changed, 54 insertions(+), 48 deletions(-) diff --git a/docs/dev-tools/grokdebugger/index.asciidoc b/docs/dev-tools/grokdebugger/index.asciidoc index 5162e806edd07..994836de7a1a2 100644 --- a/docs/dev-tools/grokdebugger/index.asciidoc +++ b/docs/dev-tools/grokdebugger/index.asciidoc @@ -32,7 +32,7 @@ in ingest node and Logstash. This example walks you through using the *Grok Debugger*. 
This tool is automatically enabled in {kib}. -NOTE: If you're using {security}, you must have the `manage_pipeline` +NOTE: If you're using {stack-security-features}, you must have the `manage_pipeline` permission to use the Grok Debugger. . Open the menu, go to *Dev Tools*, then click *Grok Debugger*. diff --git a/docs/settings/monitoring-settings.asciidoc b/docs/settings/monitoring-settings.asciidoc index 48b5b5eb5d0c0..5b8fa0725d96b 100644 --- a/docs/settings/monitoring-settings.asciidoc +++ b/docs/settings/monitoring-settings.asciidoc @@ -7,7 +7,7 @@ By default, the Monitoring application is enabled, but data collection is disabled. When you first start {kib} monitoring, you are prompted to -enable data collection. If you are using {security}, you must be +enable data collection. If you are using {stack-security-features}, you must be signed in as a user with the `cluster:manage` privilege to enable data collection. The built-in `superuser` role has this privilege and the built-in `elastic` user has this role. diff --git a/docs/setup/install.asciidoc b/docs/setup/install.asciidoc index 73036da8f1390..cb47210cb3f08 100644 --- a/docs/setup/install.asciidoc +++ b/docs/setup/install.asciidoc @@ -53,8 +53,8 @@ Formulae are available from the Elastic Homebrew tap for installing {kib} on mac <> IMPORTANT: If your Elasticsearch installation is protected by -{ref}/elasticsearch-security.html[{security}] see -{kibana-ref}/using-kibana-with-security.html[Configuring security in Kibana] for +{ref}/elasticsearch-security.html[{stack-security-features}] see +{kibana-ref}/using-kibana-with-security.html[Configuring security in {kib}] for additional setup instructions. include::install/targz.asciidoc[] diff --git a/docs/user/monitoring/monitoring-kibana.asciidoc b/docs/user/monitoring/monitoring-kibana.asciidoc index b9ec3982eb3c5..bb8b3e5d42851 100644 --- a/docs/user/monitoring/monitoring-kibana.asciidoc +++ b/docs/user/monitoring/monitoring-kibana.asciidoc @@ -20,9 +20,10 @@ node in the production cluster. By default, it is is disabled (`false`). + -- NOTE: You can specify this setting in either the `elasticsearch.yml` on each -node or across the cluster as a dynamic cluster setting. If {es} -{security-features} are enabled, you must have `monitor` cluster privileges to -view the cluster settings and `manage` cluster privileges to change them. +node or across the cluster as a dynamic cluster setting. If +{stack-security-features} are enabled, you must have `monitor` cluster +privileges to view the cluster settings and `manage` cluster privileges to +change them. -- @@ -33,7 +34,7 @@ view the cluster settings and `manage` cluster privileges to change them. -- By default, if you are running {kib} locally, go to `http://localhost:5601/`. -If {es} {security-features} are enabled, log in. +If {security-features} are enabled, log in. -- ... Open the menu, then go to *Stack Monitoring*. If data collection is @@ -80,13 +81,13 @@ monitoring cluster prevents production cluster outages from impacting your ability to access your monitoring data. It also prevents monitoring activities from impacting the performance of your production cluster. -If {security} is enabled on the production cluster, use an HTTPS URL such -as `https://:9200` in this setting. +If {security-features} are enabled on the production cluster, use an HTTPS +URL such as `https://:9200` in this setting. =============================== -- -. If the Elastic {security-features} are enabled on the production cluster: +. 
If {security-features} are enabled on the production cluster: .. Verify that there is a valid user ID and password in the `elasticsearch.username` and diff --git a/docs/user/reporting/chromium-sandbox.asciidoc b/docs/user/reporting/chromium-sandbox.asciidoc index bfef5b8b86c6b..dcb421261c067 100644 --- a/docs/user/reporting/chromium-sandbox.asciidoc +++ b/docs/user/reporting/chromium-sandbox.asciidoc @@ -2,14 +2,16 @@ [[reporting-chromium-sandbox]] === Chromium sandbox -When {reporting} uses the Chromium browser for generating PDF reports, it's recommended to use the sandbox for -an additional layer of security. The Chromium sandbox uses operating system provided mechanisms to ensure that -code execution cannot make persistent changes to the computer or access confidential information. The specific -sandboxing techniques differ for each operating system. +When {report-features} uses the Chromium browser for generating PDF reports, +it's recommended to use the sandbox for an additional layer of security. The +Chromium sandbox uses operating system provided mechanisms to ensure that +code execution cannot make persistent changes to the computer or access +confidential information. The specific sandboxing techniques differ for each +operating system. ==== Linux sandbox The Linux sandbox depends on user namespaces, which were introduced with the 3.8 Linux kernel. However, many -distributions don't have user namespaces enabled by default, or they require the CAP_SYS_ADMIN capability. {reporting} +distributions don't have user namespaces enabled by default, or they require the CAP_SYS_ADMIN capability. The {report-features} will automatically disable the sandbox when it is running on Debian and CentOS as additional steps are required to enable unprivileged usernamespaces. In these situations, you'll see the following message in your {kib} startup logs: `Chromium sandbox provides an additional layer of protection, but is not supported for your OS. diff --git a/docs/user/reporting/configuring-reporting.asciidoc b/docs/user/reporting/configuring-reporting.asciidoc index 7489e2cf51f61..ca2d79bb2dec0 100644 --- a/docs/user/reporting/configuring-reporting.asciidoc +++ b/docs/user/reporting/configuring-reporting.asciidoc @@ -2,8 +2,8 @@ [[configuring-reporting]] == Reporting configuration -You can configure settings in `kibana.yml` to control how {reporting} -communicates with the {kib} server, manages background jobs, and captures +You can configure settings in `kibana.yml` to control how the {report-features} +communicate with the {kib} server, manages background jobs, and captures screenshots. See <> for the complete list of settings. @@ -11,9 +11,9 @@ list of settings. [[encryption-keys]] === Encryption keys for multiple {kib} instances -By default, a new encryption key is generated for {reporting} each time -you start {kib}. This means if a static encryption key is not persisted in the -{kib} configuration, any pending reports will fail when you restart {kib}. +By default, a new encryption key is generated for the {report-features} each +time you start {kib}. This means if a static encryption key is not persisted in +the {kib} configuration, any pending reports will fail when you restart {kib}. If you are load balancing across multiple {kib} instances, they need to have the same reporting encryption key. 
Otherwise, report generation will fail if a diff --git a/docs/user/reporting/development/index.asciidoc b/docs/user/reporting/development/index.asciidoc index a64e540da0c70..4e86c803bd82d 100644 --- a/docs/user/reporting/development/index.asciidoc +++ b/docs/user/reporting/development/index.asciidoc @@ -1,9 +1,11 @@ [role="xpack"] [[reporting-integration]] == Reporting integration -Integrating a {kib} application with {reporting} requires a minimum amount of code, and the goal is to not have to -modify the Reporting code as we add additional applications. Instead, applications abide by a contract that Reporting -uses to determine the information that is required to export CSVs and PDFs. +Integrating a {kib} application with the {report-features} requires a minimum +amount of code, and the goal is to not have to modify the reporting code as we +add additional applications. Instead, applications abide by a contract that +{report-features} use to determine the information that is required to export +CSVs and PDFs. [IMPORTANT] ============================================== @@ -18,7 +20,7 @@ X-Pack uses the `share` plugin of the Kibana platform to register actions in the [float] === Generate job URL -To generate a new {reporting} job, different export types require different `jobParams` that are Rison encoded into a URL +To generate a new reporting job, different export types require different `jobParams` that are Rison encoded into a URL that abide by the following convention: `/api/reporting/generate?jobParams=${rison.encode(jobParams)}`. If you use the aforementioned <> then this detail will be abstracted away, but if you provide a custom UI for generating the report, you will have to generate the URL and create a POST request to the URL. diff --git a/docs/user/reporting/gs-index.asciidoc b/docs/user/reporting/gs-index.asciidoc index 87918ee76340e..46c1fd38b7d69 100644 --- a/docs/user/reporting/gs-index.asciidoc +++ b/docs/user/reporting/gs-index.asciidoc @@ -21,7 +21,7 @@ You can also <>. IMPORTANT: Reports are stored in the `.reporting-*` indices. Any user with access to these indices has access to every report generated by all users. -To use {reporting} in a production environment, +To use {report-features} in a production environment, <>. -- diff --git a/docs/user/reporting/index.asciidoc b/docs/user/reporting/index.asciidoc index 6acdbbe3f0a99..e4e4b461ac2bd 100644 --- a/docs/user/reporting/index.asciidoc +++ b/docs/user/reporting/index.asciidoc @@ -19,7 +19,7 @@ image::user/reporting/images/share-button.png["Share"] [float] == Setup -{reporting} is automatically enabled in {kib}. It runs a custom build of the Chromium web browser, which +The {report-features} are automatically enabled in {kib}. It runs a custom build of the Chromium web browser, which runs on the server in headless mode to load {kib} and capture the rendered {kib} charts as images. Chromium is an open-source project not related to Elastic, but the Chromium binary for {kib} has been custom-built by Elastic to ensure it diff --git a/docs/user/reporting/script-example.asciidoc b/docs/user/reporting/script-example.asciidoc index 88f48ad1d3182..94301fc6fb448 100644 --- a/docs/user/reporting/script-example.asciidoc +++ b/docs/user/reporting/script-example.asciidoc @@ -19,7 +19,8 @@ curl \ // CONSOLE <1> `POST` method is required. -<2> Provide user credentials for a user with permission to access Kibana and X-Pack reporting. +<2> Provide user credentials for a user with permission to access Kibana and +{report-features}. 
<3> The `kbn-version` header is required for all `POST` requests to Kibana. **The value must match the dotted-numeral version of the Kibana instance.** <4> The POST URL. You can copy and paste the URL for any report from the Kibana UI. diff --git a/docs/user/reporting/watch-example.asciidoc b/docs/user/reporting/watch-example.asciidoc index 627e31017230c..253722fefecc0 100644 --- a/docs/user/reporting/watch-example.asciidoc +++ b/docs/user/reporting/watch-example.asciidoc @@ -52,7 +52,7 @@ report from the Kibana UI. <3> Optional, default is 40 <4> Optional, default is 15s <5> Provide user credentials for a user with permission to access Kibana and -{reporting}. +the {report-features}. //For more information, see <>. //<>. diff --git a/docs/user/security/reporting.asciidoc b/docs/user/security/reporting.asciidoc index 30340e1db989a..4e02759ce99cb 100644 --- a/docs/user/security/reporting.asciidoc +++ b/docs/user/security/reporting.asciidoc @@ -5,8 +5,8 @@ Reporting operates by creating and updating documents in {es} in response to user actions in {kib}. -To use {reporting} with {security} enabled, you need to -<>. +To use {report-features} with {security-features} enabled, you need to +<>. If you are automatically generating reports with {ref}/xpack-alerting.html[{watcher}], you also need to configure {watcher} to trust the {kib} server's certificate. @@ -118,10 +118,10 @@ reporting_user: === Secure the reporting endpoints In a production environment, you should restrict access to -the {reporting} endpoints to authorized users. This requires that you: +the reporting endpoints to authorized users. This requires that you: -. Enable {security} on your {es} cluster. For more information, -see {ref}/security-getting-started.html[Getting Started with Security]. +. Enable {stack-security-features} on your {es} cluster. For more information, +see {ref}/security-getting-started.html[Getting started with security]. . Configure TLS/SSL encryption for the {kib} server. For more information, see <>. . Specify the {kib} server's CA certificate chain in `elasticsearch.yml`: @@ -150,13 +150,13 @@ For more information, see {ref}/notification-settings.html#ssl-notification-sett -- . Add one or more users who have the permissions -necessary to use {kib} and {reporting}. For more information, see +necessary to use {kib} and {report-features}. For more information, see <>. -Once you've enabled SSL for {kib}, all requests to the {reporting} endpoints +Once you've enabled SSL for {kib}, all requests to the reporting endpoints must include valid credentials. For example, see the following page which includes a watch that submits requests as the built-in `elastic` user: <>. For more information about configuring watches, see -{ref}/how-watcher-works.html[How Watcher works]. +{ref}/how-watcher-works.html[How {watcher} works]. diff --git a/docs/user/security/securing-kibana.asciidoc b/docs/user/security/securing-kibana.asciidoc index b30acd0ed2e53..0177ac94bd402 100644 --- a/docs/user/security/securing-kibana.asciidoc +++ b/docs/user/security/securing-kibana.asciidoc @@ -5,21 +5,21 @@ Configure security ++++ -{kib} users have to log in when {security} is enabled on your cluster. You -configure {security} roles for your {kib} users to control what data those users -can access. +{kib} users have to log in when {stack-security-features} are enabled on your +cluster. You configure roles for your {kib} users to control what data those +users can access. 
Most requests made through {kib} to {es} are authenticated by using the credentials of the logged-in user. There are, however, a few internal requests that the {kib} server needs to make to the {es} cluster. For this reason, you must configure credentials for the {kib} server to use for those requests. -With {security} enabled, if you load a {kib} dashboard that accesses data in an -index that you are not authorized to view, you get an error that indicates the -index does not exist. {security} do not currently provide a way to control which -users can load which dashboards. +With {security-features} enabled, if you load a {kib} dashboard that accesses +data in an index that you are not authorized to view, you get an error that +indicates the index does not exist. The {security-features} do not currently +provide a way to control which users can load which dashboards. -To use {kib} with {security}: +To use {kib} with {security-features}: . {ref}/configuring-security.html[Configure security in {es}]. @@ -38,8 +38,8 @@ elasticsearch.password: "kibanapassword" The {kib} server submits requests as this user to access the cluster monitoring APIs and the `.kibana` index. The server does _not_ need access to user indices. -The password for the built-in `kibana_system` user is typically set as part of the -{security} configuration process on {es}. For more information, see +The password for the built-in `kibana_system` user is typically set as part of +the security configuration process on {es}. For more information, see {ref}/built-in-users.html[Built-in users]. -- @@ -53,7 +53,7 @@ as the encryption key. xpack.security.encryptionKey: "something_at_least_32_characters" -------------------------------------------------------------------------------- -For more information, see <>. +For more information, see <>. -- . Optional: Set a timeout to expire idle sessions. 
By default, a session stays From dc7db09533f62d43a70e2a903c89e19b81ae8287 Mon Sep 17 00:00:00 2001 From: Nathan Reese Date: Fri, 17 Jul 2020 16:29:23 -0600 Subject: [PATCH 8/9] [Maps] convert SavedGisMap to TS (#72286) * [Maps] convert SavedGisMap to TS * i18n translate new map title --- .../{saved_gis_map.js => saved_gis_map.ts} | 65 ++++++++++++------- .../maps/public/selectors/map_selectors.ts | 4 +- 2 files changed, 42 insertions(+), 27 deletions(-) rename x-pack/plugins/maps/public/routing/bootstrap/services/{saved_gis_map.js => saved_gis_map.ts} (64%) diff --git a/x-pack/plugins/maps/public/routing/bootstrap/services/saved_gis_map.js b/x-pack/plugins/maps/public/routing/bootstrap/services/saved_gis_map.ts similarity index 64% rename from x-pack/plugins/maps/public/routing/bootstrap/services/saved_gis_map.js rename to x-pack/plugins/maps/public/routing/bootstrap/services/saved_gis_map.ts index f8c783f673bab..4b474424bcdab 100644 --- a/x-pack/plugins/maps/public/routing/bootstrap/services/saved_gis_map.js +++ b/x-pack/plugins/maps/public/routing/bootstrap/services/saved_gis_map.ts @@ -5,7 +5,13 @@ */ import _ from 'lodash'; -import { createSavedObjectClass } from '../../../../../../../src/plugins/saved_objects/public'; +import { SavedObjectReference } from 'kibana/public'; +import { i18n } from '@kbn/i18n'; +import { + createSavedObjectClass, + SavedObject, + SavedObjectKibanaServices, +} from '../../../../../../../src/plugins/saved_objects/public'; import { getTimeFilters, getMapZoom, @@ -18,65 +24,74 @@ import { } from '../../../selectors/map_selectors'; import { getIsLayerTOCOpen, getOpenTOCDetails } from '../../../selectors/ui_selectors'; import { copyPersistentState } from '../../../reducers/util'; +// @ts-expect-error import { extractReferences, injectReferences } from '../../../../common/migrations/references'; import { getExistingMapPath, MAP_SAVED_OBJECT_TYPE } from '../../../../common/constants'; +// @ts-expect-error import { getStore } from '../../store_operations'; +import { MapStoreState } from '../../../reducers/store'; +import { LayerDescriptor } from '../../../../common/descriptor_types'; + +export interface ISavedGisMap extends SavedObject { + layerListJSON?: string; + mapStateJSON?: string; + uiStateJSON?: string; + getLayerList(): LayerDescriptor[]; + syncWithStore(): void; +} -export function createSavedGisMapClass(services) { +export function createSavedGisMapClass(services: SavedObjectKibanaServices) { const SavedObjectClass = createSavedObjectClass(services); - class SavedGisMap extends SavedObjectClass { - static type = MAP_SAVED_OBJECT_TYPE; + class SavedGisMap extends SavedObjectClass implements ISavedGisMap { + public static type = MAP_SAVED_OBJECT_TYPE; // Mappings are used to place object properties into saved object _source - static mapping = { + public static mapping = { title: 'text', description: 'text', mapStateJSON: 'text', layerListJSON: 'text', uiStateJSON: 'text', }; - static fieldOrder = ['title', 'description']; - static searchSource = false; + public static fieldOrder = ['title', 'description']; + public static searchSource = false; - constructor(id) { + public showInRecentlyAccessed = true; + public layerListJSON?: string; + public mapStateJSON?: string; + public uiStateJSON?: string; + + constructor(id: string) { super({ type: SavedGisMap.type, mapping: SavedGisMap.mapping, searchSource: SavedGisMap.searchSource, extractReferences, - injectReferences: (savedObject, references) => { + injectReferences: (savedObject: ISavedGisMap, references: 
SavedObjectReference[]) => { const { attributes } = injectReferences({ attributes: { layerListJSON: savedObject.layerListJSON }, references, }); savedObject.layerListJSON = attributes.layerListJSON; - - const indexPatternIds = references - .filter((reference) => { - return reference.type === 'index-pattern'; - }) - .map((reference) => { - return reference.id; - }); - savedObject.indexPatternIds = _.uniq(indexPatternIds); }, // if this is null/undefined then the SavedObject will be assigned the defaults - id: id, + id, // default values that will get assigned if the doc is new defaults: { - title: 'New Map', + title: i18n.translate('xpack.maps.newMapTitle', { + defaultMessage: 'New Map', + }), description: '', }, }); - this.showInRecentlyAccessed = true; - } - getFullPath() { - return getExistingMapPath(this.id); + this.getFullPath = () => { + return getExistingMapPath(this.id!); + }; } getLayerList() { @@ -84,7 +99,7 @@ export function createSavedGisMapClass(services) { } syncWithStore() { - const state = getStore().getState(); + const state: MapStoreState = getStore().getState(); const layerList = getLayerListRaw(state); const layerListConfigOnly = copyPersistentState(layerList); this.layerListJSON = JSON.stringify(layerListConfigOnly); diff --git a/x-pack/plugins/maps/public/selectors/map_selectors.ts b/x-pack/plugins/maps/public/selectors/map_selectors.ts index fe2cfec3c761c..e082398a02a9e 100644 --- a/x-pack/plugins/maps/public/selectors/map_selectors.ts +++ b/x-pack/plugins/maps/public/selectors/map_selectors.ts @@ -52,6 +52,7 @@ import { ISource } from '../classes/sources/source'; import { ITMSSource } from '../classes/sources/tms_source'; import { IVectorSource } from '../classes/sources/vector_source'; import { ILayer } from '../classes/layers/layer'; +import { ISavedGisMap } from '../routing/bootstrap/services/saved_gis_map'; function createLayerInstance( layerDescriptor: LayerDescriptor, @@ -419,12 +420,11 @@ export const areLayersLoaded = createSelector( export function hasUnsavedChanges( state: MapStoreState, - savedMap: unknown, + savedMap: ISavedGisMap, initialLayerListConfig: LayerDescriptor[] ) { const layerListConfigOnly = copyPersistentState(getLayerListRaw(state)); - // @ts-expect-error const savedLayerList = savedMap.getLayerList(); return !savedLayerList From ef875cf9fe3ff27f9014a7c5d7418c27acbbacef Mon Sep 17 00:00:00 2001 From: Spencer Date: Fri, 17 Jul 2020 16:41:52 -0700 Subject: [PATCH 9/9] [esArchiver] actually re-delete the .kibana index if we lose recreate race (#72354) Co-authored-by: spalger --- src/es_archiver/lib/indices/create_index_stream.ts | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/es_archiver/lib/indices/create_index_stream.ts b/src/es_archiver/lib/indices/create_index_stream.ts index 5629f95c7c9c6..fa4c95dc73166 100644 --- a/src/es_archiver/lib/indices/create_index_stream.ts +++ b/src/es_archiver/lib/indices/create_index_stream.ts @@ -20,7 +20,6 @@ import { Transform, Readable } from 'stream'; import { inspect } from 'util'; -import { get, once } from 'lodash'; import { Client } from 'elasticsearch'; import { ToolingLog } from '@kbn/dev-utils'; @@ -54,7 +53,7 @@ export function createCreateIndexStream({ // If we're trying to import Kibana index docs, we need to ensure that // previous indices are removed so we're starting w/ a clean slate for // migrations. This only needs to be done once per archive load operation. 
- const deleteKibanaIndicesOnce = once(deleteKibanaIndices); + let kibanaIndexAlreadyDeleted = false; async function handleDoc(stream: Readable, record: DocRecord) { if (skipDocsFromIndices.has(record.value.index)) { @@ -70,8 +69,9 @@ export function createCreateIndexStream({ async function attemptToCreate(attemptNumber = 1) { try { - if (isKibana) { - await deleteKibanaIndicesOnce({ client, stats, log }); + if (isKibana && !kibanaIndexAlreadyDeleted) { + await deleteKibanaIndices({ client, stats, log }); + kibanaIndexAlreadyDeleted = true; } await client.indices.create({ @@ -90,6 +90,7 @@ export function createCreateIndexStream({ err?.body?.error?.reason?.includes('index exists with the same name as the alias') && attemptNumber < 3 ) { + kibanaIndexAlreadyDeleted = false; const aliasStr = inspect(aliases); log.info( `failed to create aliases [${aliasStr}] because ES indicated an index/alias already exists, trying again` @@ -98,10 +99,7 @@ export function createCreateIndexStream({ return; } - if ( - get(err, 'body.error.type') !== 'resource_already_exists_exception' || - attemptNumber >= 3 - ) { + if (err?.body?.error?.type !== 'resource_already_exists_exception' || attemptNumber >= 3) { throw err; }