Merge remote-tracking branch 'upstream/master' into feature/support-flaky-test-analyser

* upstream/master: (22 commits)
  [Ingest Manager] Prevent reporting ecs version twice (elastic#21616)
  [CI] Use google storage to keep artifacts (elastic#21910)
  Update docs.asciidoc (elastic#21849)
  Kubernetes leaderelection improvements (elastic#21896)
  Apply name changes to elastic agent docs (elastic#21549)
  Add 7.7.1 relnotes to 7.8 docs (elastic#21937) (elastic#21941)
  [libbeat] Fix potential deadlock in the disk queue + add more unit tests (elastic#21930)
  Refactor docker watcher to fix flaky test and other small issues (elastic#21851)
  [CI] Add stage name in the step (elastic#21887)
  [docs] Remove extra word in autodiscover docs (elastic#21871)
  [CI] lint stage doesn't produce test reports (elastic#21888)
  Add tests of reader of filestream input (elastic#21814)
  [Ingest Manager] Use local temp instead of system one (elastic#21883)
  chore: delegate variant pushes to the right method (elastic#21861)
  [CI] kind setup fails sometimes (elastic#21857)
  Fix panic on add_docker_metadata close (elastic#21882)
  Add tests for fileProspector in filestream input (elastic#21712)
  [Filebeat][okta] Fix okta pagination (elastic#21797)
  Add cloud.account.id into add_cloud_metadata for gcp (elastic#21776)
  Fix syslog RFC 5424 parsing in CheckPoint module (elastic#21854)
  ...
v1v committed Oct 19, 2020
2 parents 970eafc + ee7d329 commit 4039f89
Showing 47 changed files with 1,379 additions and 346 deletions.
106 changes: 64 additions & 42 deletions .ci/packaging.groovy
@@ -2,6 +2,13 @@

@Library('apm@current') _

import groovy.transform.Field

/**
This is required to store the test suites we will use to trigger the E2E tests.
*/
@Field def e2eTestSuites = []

pipeline {
agent none
environment {
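
A note on the `@Field` annotation introduced above: a variable declared with a plain `def` at the top of a pipeline script is local to the script body and is not visible inside the `def ...()` helper functions defined later, so the suite list has to be promoted to a field of the script class. A minimal standalone Groovy sketch of the pattern (the `prepareSuites` helper is hypothetical, not part of this commit):

import groovy.transform.Field

// @Field turns the script-level variable into a property of the script class,
// so helper methods called from different stages all append to the same list.
@Field def e2eTestSuites = []

// Hypothetical helper mirroring the shape of prepareE2ETestForPackage().
def prepareSuites(String beat) {
    if (beat.endsWith('filebeat')) {
        e2eTestSuites.push('fleet')
        e2eTestSuites.push('helm')
    }
}

prepareSuites('x-pack/filebeat')
assert e2eTestSuites.toSet() == ['fleet', 'helm'] as Set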
@@ -121,7 +128,7 @@ pipeline {
release()
pushCIDockerImages()
}
runE2ETestForPackages()
prepareE2ETestForPackage("${BEATS_FOLDER}")
}
}
stage('Package Mac OS'){
@@ -152,37 +159,34 @@
}
}
}
stage('Run E2E Tests for Packages'){
agent { label 'ubuntu && immutable' }
options { skipDefaultCheckout() }
steps {
runE2ETests()
}
}
}
}
}
}

def pushCIDockerImages(){
catchError(buildResult: 'UNSTABLE', message: 'Unable to push Docker images', stageResult: 'FAILURE') {
if ("${env.BEATS_FOLDER}" == "auditbeat"){
tagAndPush('auditbeat-oss')
} else if ("${env.BEATS_FOLDER}" == "filebeat") {
tagAndPush('filebeat-oss')
} else if ("${env.BEATS_FOLDER}" == "heartbeat"){
tagAndPush('heartbeat-oss')
if (env?.BEATS_FOLDER?.endsWith('auditbeat')) {
tagAndPush('auditbeat')
} else if (env?.BEATS_FOLDER?.endsWith('filebeat')) {
tagAndPush('filebeat')
} else if (env?.BEATS_FOLDER?.endsWith('heartbeat')) {
tagAndPush('heartbeat')
} else if ("${env.BEATS_FOLDER}" == "journalbeat"){
tagAndPush('journalbeat')
tagAndPush('journalbeat-oss')
} else if ("${env.BEATS_FOLDER}" == "metricbeat"){
tagAndPush('metricbeat-oss')
} else if (env?.BEATS_FOLDER?.endsWith('metricbeat')) {
tagAndPush('metricbeat')
} else if ("${env.BEATS_FOLDER}" == "packetbeat"){
tagAndPush('packetbeat')
tagAndPush('packetbeat-oss')
} else if ("${env.BEATS_FOLDER}" == "x-pack/auditbeat"){
tagAndPush('auditbeat')
} else if ("${env.BEATS_FOLDER}" == "x-pack/elastic-agent") {
tagAndPush('elastic-agent')
} else if ("${env.BEATS_FOLDER}" == "x-pack/filebeat"){
tagAndPush('filebeat')
} else if ("${env.BEATS_FOLDER}" == "x-pack/heartbeat"){
tagAndPush('heartbeat')
} else if ("${env.BEATS_FOLDER}" == "x-pack/metricbeat"){
tagAndPush('metricbeat')
}
}
}
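
The rewritten conditions above use `endsWith()`, so one branch now matches both the OSS folder (e.g. `filebeat`) and its x-pack counterpart (`x-pack/filebeat`), which is what allows the duplicated `x-pack/*` branches of the old version to be dropped. A rough standalone illustration, with an `imageFor` helper invented for the sketch:

// Illustration only -- not part of the commit.
def imageFor(String beatsFolder) {
    if (beatsFolder?.endsWith('filebeat')) {
        return 'filebeat'
    } else if (beatsFolder?.endsWith('metricbeat')) {
        return 'metricbeat'
    }
    return null
}

assert imageFor('filebeat') == 'filebeat'          // OSS folder
assert imageFor('x-pack/filebeat') == 'filebeat'   // x-pack folder, same branch
assert imageFor('journalbeat') == null             // unmatched folders fall through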
@@ -208,7 +212,7 @@ def tagAndPush(name){
def commitName = "${DOCKER_REGISTRY}/observability-ci/${name}${variant}:${env.GIT_BASE_COMMIT}"

def iterations = 0
retryWithSleep(retries: 3, seconds: 5, backoff: true)
retryWithSleep(retries: 3, seconds: 5, backoff: true) {
iterations++
def status = sh(label:'Change tag and push', script: """
docker tag ${oldName} ${newName}
@@ -217,30 +221,27 @@
docker push ${commitName}
""", returnStatus: true)

if ( status > 0 && iterations < 3) {
error('tag and push failed, retry')
} else if ( status > 0 ) {
log(level: 'WARN', text: "${name} doesn't have ${variant} docker images. See https://github.com/elastic/beats/pull/21621")
if ( status > 0 && iterations < 3) {
error('tag and push failed, retry')
} else if ( status > 0 ) {
log(level: 'WARN', text: "${name} doesn't have ${variant} docker images. See https://github.com/elastic/beats/pull/21621")
}
}
}
}

def runE2ETestForPackages(){
def suite = ''

catchError(buildResult: 'UNSTABLE', message: 'Unable to run e2e tests', stageResult: 'FAILURE') {
if ("${env.BEATS_FOLDER}" == "filebeat" || "${env.BEATS_FOLDER}" == "x-pack/filebeat") {
suite = 'helm,fleet'
} else if ("${env.BEATS_FOLDER}" == "metricbeat" || "${env.BEATS_FOLDER}" == "x-pack/metricbeat") {
suite = ''
} else if ("${env.BEATS_FOLDER}" == "x-pack/elastic-agent") {
suite = 'fleet'
} else {
echo("Skipping E2E tests for ${env.BEATS_FOLDER}.")
return
}

triggerE2ETests(suite)
def prepareE2ETestForPackage(String beat){
if ("${beat}" == "filebeat" || "${beat}" == "x-pack/filebeat") {
e2eTestSuites.push('fleet')
e2eTestSuites.push('helm')
} else if ("${beat}" == "metricbeat" || "${beat}" == "x-pack/metricbeat") {
e2eTestSuites.push('ALL')
echo("${beat} adds all test suites to the E2E tests job.")
} else if ("${beat}" == "x-pack/elastic-agent") {
e2eTestSuites.push('fleet')
} else {
echo("${beat} does not add any test suite to the E2E tests job.")
return
}
}
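
The `retryWithSleep` fix above is easy to miss: the old call had no closure, so the tag-and-push commands ran outside the wrapper exactly once and nothing was ever retried; the new version passes the body as a closure. A standalone Groovy sketch of that shape (`retrySketch` below is a stand-in, not the shared-library implementation of `retryWithSleep`):

// Stand-in retry helper: the work must be handed over as a closure, otherwise
// the wrapper has nothing to re-run when an attempt fails.
def retrySketch(Map args, Closure body) {
    int attempts = 0
    while (true) {
        attempts++
        try {
            body()
            return
        } catch (err) {
            if (attempts >= args.retries) { throw err }
            sleep((args.seconds as int) * 1000L)
        }
    }
}

def calls = 0
retrySketch(retries: 3, seconds: 1) {
    calls++
    if (calls < 2) { throw new RuntimeException('transient docker push failure') }
}
assert calls == 2   // first attempt failed, second succeeded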

@@ -257,8 +258,29 @@ def release(){
}
}

def runE2ETests(){
if (e2eTestSuites.size() == 0) {
echo("Not triggering E2E tests for PR-${env.CHANGE_ID} because the changes does not affect the E2E.")
return
}

def suites = '' // empty value represents all suites in the E2E tests

catchError(buildResult: 'UNSTABLE', message: 'Unable to run e2e tests', stageResult: 'FAILURE') {
def suitesSet = e2eTestSuites.toSet()

if (!suitesSet.contains('ALL')) {
suitesSet.each { suite ->
suites += "${suite},"
};
}

triggerE2ETests(suites)
}
}

def triggerE2ETests(String suite) {
echo("Triggering E2E tests for ${env.BEATS_FOLDER}. Test suite: ${suite}.")
echo("Triggering E2E tests for PR-${env.CHANGE_ID}. Test suites: ${suite}.")

def branchName = isPR() ? "${env.CHANGE_TARGET}" : "${env.JOB_BASE_NAME}"
def e2eTestsPipeline = "e2e-tests/e2e-testing-mbp/${branchName}"
Expand All @@ -285,7 +307,7 @@ def triggerE2ETests(String suite) {
wait: false
)

def notifyContext = "${env.GITHUB_CHECK_E2E_TESTS_NAME} for ${env.BEATS_FOLDER}"
def notifyContext = "${env.GITHUB_CHECK_E2E_TESTS_NAME}"
githubNotify(context: "${notifyContext}", description: "${notifyContext} ...", status: 'PENDING', targetUrl: "${env.JENKINS_URL}search/?q=${e2eTestsPipeline.replaceAll('/','+')}")
}

39 changes: 38 additions & 1 deletion CHANGELOG.asciidoc
@@ -457,6 +457,38 @@ https://github.com/elastic/beats/compare/v7.7.0...v7.8.0[View commits]
- Add support for event IDs 4673,4674,4697,4698,4699,4700,4701,4702,4768,4769,4770,4771,4776,4778,4779,4964 to the Security module. {pull}17517[17517]
- Add registry and code signature information and ECS categorization fields for sysmon module. {pull}18058[18058]

[[release-notes-7.7.1]]
=== Beats version 7.7.1
https://github.com/elastic/beats/compare/v7.7.0...v7.7.1[View commits]

==== Bugfixes

*Affecting all Beats*

- Fix `keystore add` command hanging on Windows. {issue}18649[18649] {pull}18654[18654]

*Filebeat*

- Unescape filenames in SQS messages to resolve file paths correctly. {pull}18370[18370]
- Improve failure handler for Cisco ASA and FTD pipelines to avoid mapping temporary fields. {issue}18391[18391] {pull}18392[18392]
- Fix `source.address` field not being set for the Nginx `ingress_controller` fileset. {pull}18511[18511]
- Fix Google Cloud `audit` fileset to only take in fields that are explicitly defined by the fileset. {issue}18465[18465] {pull}18472[18472]
- Fix rate limit related issue in the `httpjson` input for the Okta module. {issue}18530[18530] {pull}18534[18534]
- Fix Cisco ASA and FTD parsing errors caused by NAT fields that contain a hostname instead of an IP. {issue}14034[14034] {pull}18376[18376]
- Fix PANW module to use correct mappings for bytes and packets counters. {issue}18522[18522] {pull}18525[18525]
- Fix Office 365 ingest failures caused by IP addresses surrounded by square brackets. {issue}18587[18587] {pull}18591[18591]

*Metricbeat*

- Fix `tags_filter` setting to work correctly for the AWS `cloudwatch` metricset. {pull}18524[18524]

==== Added

*Filebeat*

- Add support for Google Application Default Credentials to the Google Pub/Sub input and Google Cloud modules. {pull}15668[15668]
- Make `decode_cef` processor GA. {pull}17944[17944]

[[release-notes-7.7.0]]
=== Beats version 7.7.0
https://github.com/elastic/beats/compare/v7.6.2...v7.7.0[View commits]
@@ -729,6 +761,12 @@ https://github.com/elastic/beats/compare/v7.6.0...v7.6.1[View commits]

- Fix timeout option of GCP functions. {issue}16282[16282] {pull}16287[16287]

==== Added

*Winlogbeat*

- Made the event parser more lenient w.r.t. invalid event log definition version numbers. {issue}15838[15838]

[[release-notes-7.6.0]]
=== Beats version 7.6.0
https://github.com/elastic/beats/compare/v7.5.1...v7.6.0[View commits]
@@ -1101,7 +1139,6 @@ processing events. (CVE-2019-17596) See https://www.elastic.co/community/securit

- Fill `event.provider`. {pull}13937[13937]
- Add support for user management events to the Security module. {pull}13530[13530]
- Made the event parser more lenient w.r.t. invalid event log definition version numbers. {issue}15838[15838]

==== Deprecated

3 changes: 3 additions & 0 deletions CHANGELOG.next.asciidoc
@@ -186,6 +186,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Fix `libbeat.output.write.bytes` and `libbeat.output.read.bytes` metrics of the Elasticsearch output. {issue}20752[20752] {pull}21197[21197]
- The `o365input` and `o365` module now recover from an authentication problem or other fatal errors, instead of terminating. {pull}21259[21258]
- Orderly close processors when processing pipelines are not needed anymore to release their resources. {pull}16349[16349]
- Fix memory leak and events duplication in docker autodiscover and add_docker_metadata. {pull}21851[21851]

*Auditbeat*

@@ -284,6 +285,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Provide backwards compatibility for the `append` processor when Elasticsearch is less than 7.10.0. {pull}21159[21159]
- Fix checkpoint module when logs contain time field. {pull}20567[20567]
- Add field limit check for AWS Cloudtrail flattened fields. {pull}21388[21388] {issue}21382[21382]
- Fix syslog RFC 5424 parsing in the CheckPoint module. {pull}21854[21854]

*Heartbeat*

@@ -464,6 +466,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Add istiod metricset. {pull}21519[21519]
- Release `add_cloudfoundry_metadata` as GA. {pull}21525[21525]
- Add support for OpenStack SSL metadata APIs in `add_cloud_metadata`. {pull}21590[21590]
- Add cloud.account.id for GCP into add_cloud_metadata processor. {pull}21776[21776]
- Add proxy metricset for istio module. {pull}21751[21751]

*Auditbeat*
39 changes: 32 additions & 7 deletions Jenkinsfile
@@ -74,7 +74,7 @@ pipeline {
}
steps {
withGithubNotify(context: 'Lint') {
withBeatsEnv(archive: true, id: 'lint') {
withBeatsEnv(archive: false, id: 'lint') {
dumpVariables()
cmd(label: 'make check', script: 'make check')
}
@@ -170,16 +170,28 @@ def cloud(Map args = [:]) {

def k8sTest(Map args = [:]) {
def versions = args.versions
node(args.label) {
versions.each{ v ->
versions.each{ v ->
node(args.label) {
stage("${args.context} ${v}"){
withEnv(["K8S_VERSION=${v}", "KIND_VERSION=v0.7.0", "KUBECONFIG=${env.WORKSPACE}/kubecfg"]){
withGithubNotify(context: "${args.context} ${v}") {
withBeatsEnv(archive: false, withModule: false) {
retryWithSleep(retries: 2, seconds: 5, backoff: true){ sh(label: "Install kind", script: ".ci/scripts/install-kind.sh") }
retryWithSleep(retries: 2, seconds: 5, backoff: true){ sh(label: "Install kubectl", script: ".ci/scripts/install-kubectl.sh") }
try {
sh(label: "Setup kind", script: ".ci/scripts/kind-setup.sh")
// Add some environmental resilience when setup does not work the very first time.
def i = 0
retryWithSleep(retries: 3, seconds: 5, backoff: true){
try {
sh(label: "Setup kind", script: ".ci/scripts/kind-setup.sh")
} catch(err) {
i++
sh(label: 'Delete cluster', script: 'kind delete cluster')
if (i > 2) {
error("Setup kind failed with error '${err.toString()}'")
}
}
}
sh(label: "Integration tests", script: "MODULE=kubernetes make -C metricbeat integration-tests")
sh(label: "Deploy to kubernetes",script: "make -C deploy/kubernetes test")
} finally {
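
The kind-setup change above combines bounded retries with an explicit `kind delete cluster` between attempts, so a half-created cluster from a failed run cannot poison the next attempt, and the final failure is still surfaced as an error. A plain-Groovy sketch of that retry-with-cleanup shape (`attemptWithCleanup` and both closures are illustrative only):

def attemptWithCleanup(int maxAttempts, Closure setup, Closure cleanup) {
    int i = 0
    while (true) {
        try {
            setup()
            return
        } catch (err) {
            i++
            cleanup()                             // e.g. 'kind delete cluster'
            if (i >= maxAttempts) {
                throw new RuntimeException("setup failed after ${i} attempts: ${err}")
            }
        }
    }
}

def events = []
attemptWithCleanup(3,
    { events << 'setup'; if (events.count('setup') < 2) { throw new RuntimeException('flaky') } },
    { events << 'cleanup' })
assert events == ['setup', 'cleanup', 'setup']    // one failure, one cleanup, one success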
@@ -211,7 +223,7 @@ def target(Map args = [:]) {
// make commands use -C <folder> while mage commands require the dir(folder)
// let's support this scenario with the location variable.
dir(isMage ? directory : '') {
cmd(label: "${command}", script: "${command}")
cmd(label: "${args.id?.trim() ? args.id : env.STAGE_NAME} - ${command}", script: "${command}")
}
}
}
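
The label change above leans on Groovy truth: a null or blank `args.id` evaluates as false after `?.trim()`, so the stage name becomes the fallback and every command label now records which stage ran it. A tiny standalone check of the pattern (the `label` helper is invented for illustration):

def label(String id, String stageName, String command) {
    return "${id?.trim() ? id : stageName} - ${command}"
}

assert label('lint', 'Lint', 'make check') == 'lint - make check'   // explicit id wins
assert label(null, 'Lint', 'make check') == 'Lint - make check'     // null id falls back
assert label('   ', 'Lint', 'make check') == 'Lint - make check'    // blank id falls back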
@@ -375,7 +387,7 @@ def archiveTestOutput(Map args = [:]) {
script: 'rm -rf ve || true; find . -type d -name vendor -exec rm -r {} \\;')
} else { log(level: 'INFO', text: 'Delete folders that are causing exceptions (See JENKINS-58421) is disabled for Windows.') }
junitAndStore(allowEmptyResults: true, keepLongStdio: true, testResults: args.testResults, stashedTestReports: stashedTestReports, id: args.id)
tar(file: "test-build-artifacts-${args.id}.tgz", dir: '.', archive: true, allowMissing: true)
tarAndUploadArtifacts(file: "test-build-artifacts-${args.id}.tgz", location: '.')
}
catchError(buildResult: 'SUCCESS', message: 'Failed to archive the build test results', stageResult: 'SUCCESS') {
def folder = cmd(label: 'Find system-tests', returnStdout: true, script: 'python .ci/scripts/search_system_tests.py').trim()
@@ -384,12 +396,25 @@
// TODO: nodeOS() should support ARM
def os_suffix = isArm() ? 'linux' : nodeOS()
def name = folder.replaceAll('/', '-').replaceAll('\\\\', '-').replaceAll('build', '').replaceAll('^-', '') + '-' + os_suffix
tar(file: "${name}.tgz", archive: true, dir: folder)
tarAndUploadArtifacts(file: "${name}.tgz", location: folder)
}
}
}
}

/**
* Wrapper to tar and upload artifacts to Google Storage to avoid killing the
* disk space of the jenkins instance
*/
def tarAndUploadArtifacts(Map args = [:]) {
tar(file: args.file, dir: args.location, archive: false, allowMissing: true)
googleStorageUpload(bucket: "gs://${JOB_GCS_BUCKET}/${env.JOB_NAME}-${env.BUILD_ID}",
credentialsId: "${JOB_GCS_CREDENTIALS}",
pattern: "${args.file}",
sharedPublicly: true,
showInline: true)
}

/**
* This method executes a closure with credentials for cloud test
* environments.
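
The new `tarAndUploadArtifacts` wrapper above replaces `archive: true` (which keeps every tarball on the Jenkins controller) with an upload to the job's Google Storage bucket, which is where the disk-space saving mentioned in the comment comes from. For orientation only, a sketch of the object path produced by the `bucket:` expression above, using made-up values:

// Illustration of the gs:// layout only; the real bucket, job name and build
// id come from JOB_GCS_BUCKET, env.JOB_NAME and env.BUILD_ID at run time.
def gcsDestination(String bucket, String jobName, String buildId, String file) {
    return "gs://${bucket}/${jobName}-${buildId}/${file}"
}

def expected = 'gs://beats-ci-artifacts/Beats/beats/PR-12345-7/test-build-artifacts-lint.tgz'
assert gcsDestination('beats-ci-artifacts', 'Beats/beats/PR-12345', '7',
                      'test-build-artifacts-lint.tgz') == expected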
18 changes: 10 additions & 8 deletions filebeat/docs/modules/checkpoint.asciidoc
@@ -12,25 +12,27 @@ This file is generated! See scripts/docs_collector.py
== Check Point module
beta[]

This is a module for Check Point firewall logs. It supports logs from the Log Exporter in the Syslog format.
This is a module for Check Point firewall logs. It supports logs from the Log
Exporter in the Syslog RFC 5424 format. If you need to ingest Check Point logs
in CEF format then please use the <<filebeat-module-cef, `CEF module`>> (more
fields are provided in the syslog output).

To configure a Log Exporter, please refer to the documentation by https://supportcenter.checkpoint.com/supportcenter/portal?eventSubmit_doGoviewsolutiondetails=&solutionid=sk122323[Check Point].
To configure a Log Exporter, please refer to the documentation by
https://supportcenter.checkpoint.com/supportcenter/portal?eventSubmit_doGoviewsolutiondetails=&solutionid=sk122323[Check
Point].

Example below:
Example Log Exporter config:

`cp_log_export add name testdestination target-server 192.168.1.1 target-port 9001 protocol udp format syslog`

The module that supports Check Point firewall logs sent in the CEF format requires the <<filebeat-module-cef, `CEF module`>>

The Check Point and ECS fields that are the same between both modules will be mapped to the same names for compatibility between modules, though not all fields are included in CEF. Please reference the supported fields in the CEF documentation.

include::../include/gs-link.asciidoc[]


[float]
=== Compatibility

This module has been tested against Check Point Log Exporter on R80.X but should also work with R77.30.
This module has been tested against Check Point Log Exporter on R80.X but should
also work with R77.30.

include::../include/configuring-intro.asciidoc[]
