diff --git a/resources/JenkinsfileTemplate.groovy b/resources/JenkinsfileTemplate.groovy index 782d59fc2..8bac63133 100644 --- a/resources/JenkinsfileTemplate.groovy +++ b/resources/JenkinsfileTemplate.groovy @@ -16,6 +16,7 @@ pipeline { parameters { string(name: 'PARAM_WITH_DEFAULT_VALUE', defaultValue: "defaultValue", description: "it would not be defined on the first build, see JENKINS-41929.") booleanParam(name: 'Run_As_Master_Branch', defaultValue: false, description: 'Allow to run any steps on a PR, some steps normally only run on master branch.') + booleanParam(name: 'doc_ci', defaultValue: true, description: 'Enable build docs.') } stages { stage('Initializing'){ @@ -87,7 +88,7 @@ pipeline { tag "v\\d+\\.\\d+\\.\\d+*" environment name: 'Run_As_Master_Branch', value: 'true' } - environment name: 'doc_ci', value: 'true' + expression { return params.doc_ci } } } steps { diff --git a/src/test/groovy/DummyStepTests.groovy b/src/test/groovy/DummyStepTests.groovy index cb846dcaf..4f07f13f3 100644 --- a/src/test/groovy/DummyStepTests.groovy +++ b/src/test/groovy/DummyStepTests.groovy @@ -19,6 +19,25 @@ class DummyStepTests extends BasePipelineTest { helper.registerAllowedMethod("sh", [String.class], { "OK" }) helper.registerAllowedMethod("withEnvWrapper", [Closure.class], { closure -> closure.call() }) helper.registerAllowedMethod("script", [Closure.class], { closure -> closure.call() }) + helper.registerAllowedMethod("pipeline", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("agent", [String.class], { "OK" }) + helper.registerAllowedMethod("agent", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("label", [String.class], { "OK" }) + helper.registerAllowedMethod("stages", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("steps", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("post", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("success", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("aborted", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("failure", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("unstable", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("always", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("dir", [String.class, Closure.class], { path, body -> body() }) + helper.registerAllowedMethod("when", [Closure.class], { "OK" }) + helper.registerAllowedMethod("parallel", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("failFast", [Boolean.class], { "OK" }) + helper.registerAllowedMethod("script", [Closure.class], { body -> body() }) + helper.registerAllowedMethod("options", [Closure.class], { "OK" }) + helper.registerAllowedMethod("environment", [Closure.class], { "OK" }) } @Test diff --git a/src/test/groovy/GitCheckoutStepTests.groovy b/src/test/groovy/GitCheckoutStepTests.groovy index 1f955b0fd..d28c4a9ae 100644 --- a/src/test/groovy/GitCheckoutStepTests.groovy +++ b/src/test/groovy/GitCheckoutStepTests.groovy @@ -18,7 +18,7 @@ class GitCheckoutStepTests extends BasePipelineTest { helper.registerAllowedMethod("sh", [Map.class], { "OK" }) helper.registerAllowedMethod("sh", [String.class], { "OK" }) helper.registerAllowedMethod("checkout", [String.class], { "OK" }) - helper.registerAllowedMethod("github_enterprise_constructor", [], { "OK" }) + helper.registerAllowedMethod("githubEnv", [], { "OK" }) helper.registerAllowedMethod("withEnvWrapper", 
[Closure.class], { closure -> closure.call() }) } diff --git a/src/test/groovy/LogStepTests.groovy b/src/test/groovy/LogStepTests.groovy new file mode 100644 index 000000000..3a71eab2e --- /dev/null +++ b/src/test/groovy/LogStepTests.groovy @@ -0,0 +1,116 @@ +import com.lesfurets.jenkins.unit.BasePipelineTest +import org.junit.Before; +import org.junit.Test; +import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString +import static org.junit.Assert.assertTrue + +class LogStepTests extends BasePipelineTest { + Map env = [:] + + @Override + @Before + void setUp() throws Exception { + super.setUp() + + env.WORKSPACE = "WS" + binding.setVariable('env', env) + helper.registerAllowedMethod("echoColor", [Map.class], { m -> + def echoColor = loadScript("vars/echoColor.groovy") + echoColor.call(m) + }) + } + + @Test + void test() throws Exception { + def script = loadScript("vars/log.groovy") + env.PIPELINE_LOG_LEVEL = 'DEBUG' + script.call(text: "message") + printCallStack() + assertTrue(helper.callStack.findAll { call -> + call.methodName == "echo" + }.any { call -> + callArgsToString(call).contains("message") + callArgsToString(call).contains("[DEBUG]") + }) + assertJobStatusSuccess() + } + + @Test + void testDebug() throws Exception { + def script = loadScript("vars/log.groovy") + env.PIPELINE_LOG_LEVEL = 'DEBUG' + script.call(level: 'DEBUG', text: "message") + printCallStack() + assertTrue(helper.callStack.findAll { call -> + call.methodName == "echo" + }.any { call -> + callArgsToString(call).contains("message") + callArgsToString(call).contains("[DEBUG]") + }) + assertJobStatusSuccess() + } + + @Test + void testInfo() throws Exception { + def script = loadScript("vars/log.groovy") + script.call(level: 'INFO', text: "message") + printCallStack() + assertTrue(helper.callStack.findAll { call -> + call.methodName == "echo" + }.any { call -> + callArgsToString(call).contains("message") + callArgsToString(call).contains("[INFO]") + }) + assertJobStatusSuccess() + } + + @Test + void testWarn() throws Exception { + def script = loadScript("vars/log.groovy") + env.PIPELINE_LOG_LEVEL = 'WARN' + script.call(level: 'WARN', text: "message") + printCallStack() + assertTrue(helper.callStack.findAll { call -> + call.methodName == "echo" + }.any { call -> + callArgsToString(call).contains("message") + callArgsToString(call).contains("[WARN]") + }) + assertJobStatusSuccess() + } + + @Test + void testError() throws Exception { + def script = loadScript("vars/log.groovy") + env.PIPELINE_LOG_LEVEL = 'ERROR' + script.call(level: 'ERROR', text: "message") + printCallStack() + assertTrue(helper.callStack.findAll { call -> + call.methodName == "echo" + }.any { call -> + callArgsToString(call).contains("message") + callArgsToString(call).contains("[ERROR]") + }) + assertJobStatusSuccess() + } + + @Test + void testLevel() throws Exception { + def script = loadScript("vars/log.groovy") + env.PIPELINE_LOG_LEVEL = 'WARN' + script.call(level: 'DEBUG', text: "messageDEBUG") + script.call(level: 'INFO', text: "messageINFO") + script.call(level: 'WARN', text: "messageWARN") + script.call(level: 'ERROR', text: "messageERROR") + printCallStack() + assertTrue(helper.callStack.findAll { call -> + call.methodName == "echo" + }.any { call -> + !callArgsToString(call).contains("[DEBUG]") + !callArgsToString(call).contains("[INFO]") + callArgsToString(call).contains("[WARN]") + callArgsToString(call).contains("[ERROR]") + }) + assertJobStatusSuccess() + } +} \ No newline at end of file diff --git 
a/src/test/groovy/RunPipelineTests.groovy b/src/test/groovy/RunPipelineTests.groovy deleted file mode 100644 index e6964ccb5..000000000 --- a/src/test/groovy/RunPipelineTests.groovy +++ /dev/null @@ -1,69 +0,0 @@ -import com.lesfurets.jenkins.unit.BasePipelineTest -import org.junit.Before; -import org.junit.Test; -import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString -import static org.junit.Assert.assertTrue - -class RunPipelineTests extends BasePipelineTest { - @Override - @Before - void setUp() throws Exception { - super.setUp() - helper.registerAllowedMethod("pipeline", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("agent", [String.class], { "OK" }) - helper.registerAllowedMethod("agent", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("label", [String.class], { "OK" }) - helper.registerAllowedMethod("stages", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("steps", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("post", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("success", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("aborted", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("failure", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("unstable", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("always", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("when", [Closure.class], { "OK" }) - helper.registerAllowedMethod("parallel", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("failFast", [Boolean.class], { "OK" }) - helper.registerAllowedMethod("script", [Closure.class], { body -> body() }) - helper.registerAllowedMethod("options", [Closure.class], { "OK" }) - helper.registerAllowedMethod("environment", [Closure.class], { "OK" }) - } - - @Test - void testDefault() throws Exception { - def script = loadScript("vars/runPipeline.groovy") - script.any = "any" - script.call(name: "default") - printCallStack() - assertTrue(helper.callStack.findAll { call -> - call.methodName == "echo" - }.any { call -> - callArgsToString(call).contains("Hello, I am pipeline") - }) - } - - @Test - void testNoName() throws Exception { - def script = loadScript("vars/runPipeline.groovy") - script.any = "any" - script.call() - printCallStack() - assertTrue(helper.callStack.findAll { call -> - call.methodName == "echo" - }.any { call -> - callArgsToString(call).contains("Hello, I am pipeline") - }) - } - - @Test - void testNameMatch() throws Exception { - def script = loadScript("vars/runPipeline.groovy") - script.call(name: 'test') - printCallStack() - assertTrue(helper.callStack.findAll { call -> - call.methodName == "echo" - }.any { call -> - callArgsToString(call).contains("Hello, I am Test pipeline") - }) - } -} \ No newline at end of file diff --git a/src/test/groovy/WithEnvWrapperStepTests.groovy b/src/test/groovy/WithEnvWrapperStepTests.groovy index 76d7c8b5f..2c06c903f 100644 --- a/src/test/groovy/WithEnvWrapperStepTests.groovy +++ b/src/test/groovy/WithEnvWrapperStepTests.groovy @@ -3,6 +3,8 @@ import org.junit.Before; import org.junit.Test; import static com.lesfurets.jenkins.unit.MethodCall.callArgsToString import static org.junit.Assert.assertTrue +import static org.junit.Assert.assertFalse + class WithEnvWrapperStepTests extends BasePipelineTest { @@ -46,6 +48,7 @@ class WithEnvWrapperStepTests extends BasePipelineTest { helper.registerAllowedMethod("wrap", 
[Map.class, Closure.class], wrapInterceptor) helper.registerAllowedMethod("deleteDir", [], { "OK" }) helper.registerAllowedMethod("withEnv", [List.class, Closure.class], withEnvInterceptor) + helper.registerAllowedMethod("dir", [String.class, Closure.class], { path, body -> body() }) } @Test @@ -64,6 +67,35 @@ class WithEnvWrapperStepTests extends BasePipelineTest { }) printCallStack() assertTrue(isOK) + assertTrue(helper.callStack.findAll { call -> + call.methodName == "deleteDir" + }?.size()==1) + assertJobStatusSuccess() + } + + @Test + void testCleanAfter() throws Exception { + def script = loadScript("vars/withEnvWrapper.groovy") + def isOK = false + script.call(cleanBefore: false, {isOK = true}) + printCallStack() + assertTrue(isOK) + assertTrue(helper.callStack.findAll { call -> + call.methodName == "deleteDir" + }?.size()==0) + assertJobStatusSuccess() + } + + @Test + void testCleanAfterBefore() throws Exception { + def script = loadScript("vars/withEnvWrapper.groovy") + def isOK = false + script.call(cleanAfter: true, cleanBefore: true, baseDir: 'src', {isOK = true}) + printCallStack() + assertTrue(isOK) + assertTrue(helper.callStack.findAll { call -> + call.methodName == "deleteDir" + }?.size()==2) assertJobStatusSuccess() } } \ No newline at end of file diff --git a/vars/README.md b/vars/README.md index bd86e2364..8ae56c401 100644 --- a/vars/README.md +++ b/vars/README.md @@ -113,6 +113,20 @@ githubEnv() * `REPO_NAME`: repository name in the git URL, it sets this environment variable processing the GIT_URL. * `GIT_SHA`: current commit SHA1, it sets this getting it from local repo. * `GIT_BUILD_CAUSE`: build cause can be a pull request(pr), a commit, or a merge +## log +Allow to print messages with different levels of verbosity. It will show all messages that match +to an upper log level than defined, the default level is debug. +You have to define the environment variable PIPELINE_LOG_LEVEL to select +the log level by default is INFO. + + Levels: DEBUG, INFO, WARN, ERROR + +``` + log(level: 'INFO', text: 'message') +``` + +* `level`: sets the verbosity of the messages (DEBUG, INFO, WARN, ERROR) +* `text`: Message to print. The color of the messages depends on the level. ## runIntegrationTestAxis Run a set of integration test against a Axis of versions.(go, java, nodejs, python, ruby) It needs the integration test sources stashed. @@ -123,14 +137,7 @@ runIntegrationTestAxis(source: 'source', agentType: 'go') * *agentType*: Agent type to test (all, go, java, python, nodejs, ruby, ...). * *source*: Stash name that contains the source code. * *baseDir*: Directory where the code is in the stash code(default 'src/github.com/elastic/apm-integration-testing'). -* *elasticStack*: Elastic Stack branch/tag to use(default 'master').## runPipeline -Run a pipeline passed as parameter. - -``` -runPipeline(name: 'pipeline-name') -``` - -* name: the pipeline name to execute. ## sendBenchmarks +* *elasticStack*: Elastic Stack branch/tag to use(default 'master').## sendBenchmarks Send the benchmarks to the cloud service. Requires Go installed. @@ -232,6 +239,16 @@ withEnvWrapper(){ } ``` +``` +withEnvWrapper(cleanBefore: true, cleanAfter: true, baseDir: 'src'){ + //block +} +``` + +* cleanBefore: clean the workspace before execute the code block. +* cleanAfter: clean the workspace after execute the code block. +* baseDir: directory to work into, if does not exists would be created. + *TODO* replace each variable with a secret text credential type, then use withCredentials step. 
``` diff --git a/vars/gitCheckout.groovy b/vars/gitCheckout.groovy index 52179dcae..71381a629 100644 --- a/vars/gitCheckout.groovy +++ b/vars/gitCheckout.groovy @@ -27,7 +27,7 @@ def call(Map params = [:]){ } else if (branch && branch != "" && repo && credentialsId){ - echo "Checkout ${branch}" + echo "Checkout ${branch} from ${repo} with credentials ${credentialsId}" checkout([$class: 'GitSCM', branches: [[name: "${branch}"]], doGenerateSubmoduleConfigurations: false, extensions: [], diff --git a/vars/log.groovy b/vars/log.groovy new file mode 100644 index 000000000..b49457808 --- /dev/null +++ b/vars/log.groovy @@ -0,0 +1,63 @@ +#!/usr/bin/env groovy + +/** +Allow to print messages with different levels of verbosity. It will show all messages that match +to an upper log level than defined, the default level is debug. +You have to define the environment variable PIPELINE_LOG_LEVEL to select +the log level by default is INFO. + + Levels: DEBUG, INFO, WARN, ERROR + + log(level: 'INFO', text: 'message') + +*/ +def call(Map params = [:]) { + def level = params.containsKey('level') ? getLogLevelNum(params.level) : getLogLevelNum('DEBUG') + def text = params.containsKey('text') ? params.text : '' + def currentLevel = getLogLevelNum(env?.PIPELINE_LOG_LEVEL) + if( level >= currentLevel){ + logMessage(level, text) + } +} + +def logMessage(level, text){ + switch(level) { + case 0: + echoColor(text: "[DEBUG] ${text}", colorfg: 'blue', colorbg: 'default') + break + case 1: + echoColor(text: "[INFO] ${text}", colorfg: 'white', colorbg: 'default') + break + case 2: + echoColor(text: "[WARN] ${text}", colorfg: 'yellow', colorbg: 'default') + break + case 3: + echoColor(text: "[ERROR] ${text}", colorfg: 'red', colorbg: 'default') + break + default: + echoColor(text: "[DEBUG] ${text}", colorfg: 'blue', colorbg: 'default') + break + } +} + +def getLogLevelNum(level){ + def levelNum = 1 + switch(level) { + case 'DEBUG': + levelNum = 0 + break + case 'INFO': + levelNum = 1 + break + case 'WARN': + levelNum = 2 + break + case 'ERROR': + levelNum = 3 + break + default: + levelNum = 1 + break + } + return levelNum +} \ No newline at end of file diff --git a/vars/log.txt b/vars/log.txt new file mode 100644 index 000000000..a4c9751ad --- /dev/null +++ b/vars/log.txt @@ -0,0 +1,13 @@ +Allow to print messages with different levels of verbosity. It will show all messages that match +to an upper log level than defined, the default level is debug. +You have to define the environment variable PIPELINE_LOG_LEVEL to select +the log level by default is INFO. + + Levels: DEBUG, INFO, WARN, ERROR + +``` + log(level: 'INFO', text: 'message') +``` + +* `level`: sets the verbosity of the messages (DEBUG, INFO, WARN, ERROR) +* `text`: Message to print. The color of the messages depends on the level. 
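As a rough illustration of the filtering rule implemented in `vars/log.groovy` above (a sketch only — it assumes the shared library is already loaded in the Jenkinsfile and that `PIPELINE_LOG_LEVEL` is settable on the job or inside the build):

```groovy
// With PIPELINE_LOG_LEVEL=WARN, log.groovy drops anything below WARN:
// getLogLevelNum('WARN') == 2, and only levels >= 2 are echoed via echoColor.
node {
  env.PIPELINE_LOG_LEVEL = 'WARN'
  log(level: 'DEBUG', text: 'not printed')        // 0 < 2, filtered out
  log(level: 'INFO',  text: 'not printed')        // 1 < 2, filtered out
  log(level: 'WARN',  text: 'printed in yellow')  // 2 >= 2, echoColor colorfg: 'yellow'
  log(level: 'ERROR', text: 'printed in red')     // 3 >= 2, echoColor colorfg: 'red'
  log(text: 'not printed')                        // level defaults to DEBUG, filtered out
}
```

If `PIPELINE_LOG_LEVEL` is not defined, `getLogLevelNum()` falls back to INFO, so DEBUG messages are hidden by default while INFO and above are shown.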
diff --git a/vars/pipelineApmUI.groovy b/vars/pipelineApmUI.groovy index b3d48d13a..f2306d804 100644 --- a/vars/pipelineApmUI.groovy +++ b/vars/pipelineApmUI.groovy @@ -1,3 +1,206 @@ +/** + APM UI Pipeline +*/ +void call(Map args = [:]){ + pipeline { + agent { label 'linux && immutable' } + environment { + BASE_DIR="src/github.com/elastic/kibana" + ES_BASE_DIR="src/github.com/elastic/elasticsearch" + JOB_GIT_CREDENTIALS = "f6c7695a-671e-4f4f-a331-acdce44ff9ba" + FORCE_COLOR = "2" + GIT_URL = "git@github.com:elastic/kibana.git" + ES_GIT_URL = "git@github.com:elastic/elasticsearch.git" + TEST_BROWSER_HEADLESS = "${params.TEST_BROWSER_HEADLESS}" + TEST_ES_FROM = "${params.TEST_ES_FROM}" + } + options { + //timeout(time: 5, unit: 'HOURS') + buildDiscarder(logRotator(numToKeepStr: '3', artifactNumToKeepStr: '2', daysToKeepStr: '30')) + timestamps() + preserveStashes() + ansiColor('xterm') + disableResume() + durabilityHint('PERFORMANCE_OPTIMIZED') + } + parameters { + string(name: 'branch_specifier', defaultValue: "master", description: "the Git branch specifier to build (branchName, tagName, commitId, etc.)") + string(name: 'ES_VERSION', defaultValue: "6.5", description: "Elastic Stack Git branch/tag to use") + string(name: 'TEST_BROWSER_HEADLESS', defaultValue: "1", description: "Use headless browser.") + string(name: 'TEST_ES_FROM', defaultValue: "source", description: "Test from sources.") + booleanParam(name: 'Run_As_Master_Branch', defaultValue: false, description: 'Allow to run any steps on a PR, some steps normally only run on master branch.') + booleanParam(name: 'build_oss_ci', defaultValue: false, description: 'Build OSS') + booleanParam(name: 'build_no_oss_ci', defaultValue: false, description: 'Build NO OSS') + booleanParam(name: 'intake_ci', defaultValue: false, description: 'Intake Tests') + booleanParam(name: 'ciGroup_ci', defaultValue: false, description: 'Group Tests') + booleanParam(name: 'x_pack_intake_ci', defaultValue: false, description: 'X-Pack intake Tests') + booleanParam(name: 'x_pack_ciGroup_ci', defaultValue: false, description: 'X-Pack Group Tests') + } + stages { + /** + Checkout the code and stash it, to use it on other stages. + */ + stage('Initializing') { + agent { label 'linux && immutable' } + environment { + HOME = "${env.WORKSPACE}" + } + stages { + stage('Checkout') { + steps { + checkoutKibana() + checkoutES() + } + } + stage('Quick Test') { + steps { + quickTest() + } + } + } + } + stage('build'){ + failFast true + parallel { + /** + Build on a linux environment. + */ + stage('build oss') { + agent { label 'linux && immutable' } + when { + beforeAgent true + expression { return params.build_oss_ci } + } + steps { + buildOSSSteps() + } + } + /** + Building and extracting default Kibana distributable for use in functional tests + */ + stage('build no-oss') { + agent { label 'linux && immutable' } + when { + beforeAgent true + expression { return params.build_no_oss_ci } + } + steps { + buildNoOSSSteps() + } + } + } + } + /** + Test on a linux environment. + */ + stage('kibana-intake') { + environment { + HOME = "${env.WORKSPACE}" + JAVA_HOME = "${env.HUDSON_HOME}/.java/java11" + PATH = "${env.JAVA_HOME}/bin:${env.PATH}" + } + when { + beforeAgent true + expression { return params.intake_ci } + } + steps { + kibanaIntakeSteps() + } + post { always { grabTestResults() } } + } + /** + Test ciGroup tests on a linux environment. 
+ */ + stage('kibana-ciGroup') { + environment { + HOME = "${env.WORKSPACE}" + JAVA_HOME = "${env.HUDSON_HOME}/.java/java11" + PATH = "${env.JAVA_HOME}/bin:${env.PATH}" + } + when { + beforeAgent true + expression { return params.ciGroup_ci } + } + steps { + kibanaGroupSteps() + } + post { always { grabTestResults() } } + } + /** + Test x-pack-intake tests on a linux environment. + */ + stage('x-pack-intake') { + environment { + HOME = "${env.WORKSPACE}" + JAVA_HOME = "${env.HUDSON_HOME}/.java/java11" + PATH = "${env.JAVA_HOME}/bin:${env.PATH}" + XPACK_DIR = "${env.WORKSPACE}/${env.BASE_DIR}/x-pack" + } + when { + beforeAgent true + expression { return params.x_pack_intake_ci } + } + steps { + xPackIntakeSteps() + } + post { always { grabTestResults() } } + } + /** + Test x-pack-ciGroup tests on a linux environment. + */ + stage('x-pack-ciGroup') { + environment { + HOME = "${env.WORKSPACE}" + JAVA_HOME = "${env.HUDSON_HOME}/.java/java11" + PATH = "${env.JAVA_HOME}/bin:${env.PATH}" + XPACK_DIR = "${env.WORKSPACE}/${env.BASE_DIR}/x-pack" + INSTALL_DIR = "${env.WORKSPACE}/install/kibana" + } + when { + beforeAgent true + expression { return params.x_pack_ciGroup_ci } + } + steps { + xPackGroupSteps() + } + post { always { grabTestResults() } } + } + } + post { + success { + echoColor(text: '[SUCCESS]', colorfg: 'green', colorbg: 'default') + } + aborted { + echoColor(text: '[ABORTED]', colorfg: 'magenta', colorbg: 'default') + } + failure { + echoColor(text: '[FAILURE]', colorfg: 'red', colorbg: 'default') + //step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: "${NOTIFY_TO}", sendToIndividuals: false]) + } + unstable { + echoColor(text: '[UNSTABLE]', colorfg: 'yellow', colorbg: 'default') + } + } + } +} + +/** + unstash the stash passed as parameter or execute the block code passed. + This works as a cache that make the retrieve process only once, the rest of times + unstash the stuff. +*/ +def useCache(String name, Closure body){ + try{ + unstash name + } catch(error){ + body() + currentBuild.result = "SUCCESS" + } +} + +/** + Archive result files. +*/ def grabTestResults(){ junit(allowEmptyResults: true, keepLongStdio: true, @@ -7,6 +210,9 @@ def grabTestResults(){ onlyIfSuccessful: false) } +/** + Define NodeJs environment variables. +*/ def nodeEnviromentVars(nodeVersion){ /** TODO this enviroment variables could change on diferent type of agents, so maybe it is better to move then to the stage*/ if(env.ORG_PATH == null){ @@ -18,85 +224,122 @@ def nodeEnviromentVars(nodeVersion){ sh 'export' } +/** + install NodeJs, it uses stash as cache. 
+*/ def installNodeJs(nodeVersion, pakages = null){ nodeEnviromentVars(nodeVersion) - sh """#!/bin/bash - set -euxo pipefail - NODE_URL="https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}-linux-x64.tar.gz" - mkdir -p "${NODE_DIR}" - curl -sL \${NODE_URL} | tar -xz -C "${NODE_DIR}" --strip-components=1 - node --version - npm config set prefix "${NODE_DIR}" - npm config list - """ - def cmd = "echo 'Installing aditional packages'\n" - pakages?.each{ pkg -> - cmd += "npm install -g ${pkg}\n" - } - sh """#!/bin/bash - set -euxo pipefail - ${cmd} - """ + useCache('nodeJs'){ + sh """#!/bin/bash + set -euxo pipefail + NODE_URL="https://nodejs.org/dist/v${nodeVersion}/node-v${nodeVersion}-linux-x64.tar.gz" + mkdir -p "${NODE_DIR}" + curl -sL \${NODE_URL} | tar -xz -C "${NODE_DIR}" --strip-components=1 + node --version + npm config set prefix "${NODE_DIR}" + npm config list + """ + def cmd = "echo 'Installing aditional packages'\n" + pakages?.each{ pkg -> + cmd += "npm install -g ${pkg}\n" + } + sh """#!/bin/bash + set -euxo pipefail + ${cmd} + """ + stash allowEmpty: true, name: 'nodeJs', includes: "node/**", useDefaultExcludes: false + } } -def checkoutSteps(){ - sh 'export' - withEnvWrapper() { - gitCheckout(basedir: "${BASE_DIR}", branch: env?.branch_specifier, - repo: "${GIT_URL}", - credentialsId: "${JOB_GIT_CREDENTIALS}") - stash allowEmpty: true, name: 'source', useDefaultExcludes: false - dir("${BASE_DIR}"){ - script{ - def packageJson = readJSON(file: 'package.json') - env.NODE_VERSION = packageJson.engines.node - env.YARN_VERSION = packageJson.engines.yarn - installNodeJs("${NODE_VERSION}", ["yarn@${YARN_VERSION}"]) - sh """#!/bin/bash - set -euxo pipefail - yarn kbn bootstrap - """ - } - } - stash allowEmpty: true, name: 'cache', includes: "${BASE_DIR}/node_modules/**,node/**", useDefaultExcludes: false +/** + Get Elasticsearch sources, it uses stash as cache. +*/ +def checkoutES(){ + useCache('es-source'){ dir("${ES_BASE_DIR}"){ - /** TODO grab the correct elasticsearch branch */ - checkout([$class: 'GitSCM', branches: [[name: "master"]], + checkout([$class: 'GitSCM', branches: [[name: "${params.ES_VERSION}"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${JOB_GIT_CREDENTIALS}", url: "${ES_GIT_URL}"]]]) } - stash allowEmpty: true, name: 'es', includes: "${ES_BASE_DIR}/**", useDefaultExcludes: false + stash allowEmpty: true, name: 'es-source', includes: "${ES_BASE_DIR}/**", excludes: ".git", useDefaultExcludes: false } } +/** + Get Kibana sources, it uses stash as cache. + also, define NODE_VERSION, and YARN_VERSION environment variables. + It modifies the path to add the `yarn bin` folder. + It executes `yarn kbn bootstrap` and stash the reults. 
+*/ +def checkoutKibana(){ + useCache('source'){ + gitCheckout(basedir: "${BASE_DIR}", branch: params.branch_specifier, + repo: "${GIT_URL}", + credentialsId: "${JOB_GIT_CREDENTIALS}") + stash allowEmpty: true, name: 'source', excludes: "${BASE_DIR}/.git,node/**", useDefaultExcludes: false + } + dir("${BASE_DIR}"){ + def packageJson = readJSON(file: 'package.json') + env.NODE_VERSION = packageJson.engines.node + env.YARN_VERSION = packageJson.engines.yarn + } + + installNodeJs("${NODE_VERSION}", ["yarn@${YARN_VERSION}"]) + + dir("${BASE_DIR}"){ + def yarnBinPath = sh(script: 'yarn bin', returnStdout: true) + env.PATH="${env.PATH}:${yarnBinPath}" + } + + useCache('cache'){ + dir("${BASE_DIR}"){ + sh '''#!/bin/bash + set -euxo pipefail + yarn kbn bootstrap + ''' + } + stash allowEmpty: true, name: 'cache', + includes: "${BASE_DIR}/node_modules/**,${BASE_DIR}/optimize/**,${BASE_DIR}/target/**", + useDefaultExcludes: false + } +} + +/** + build the Kibana OSS. +*/ def buildOSSSteps(){ - withEnvWrapper() { - unstash 'source' - unstash 'cache' - nodeEnviromentVars("${NODE_VERSION}") + useCache('build-oss'){ + checkoutKibana() dir("${BASE_DIR}"){ sh '''#!/bin/bash set -euxo pipefail node scripts/build --debug --oss --skip-archives --skip-os-packages ''' } - stash allowEmpty: true, name: 'build-oss', includes: "${BASE_DIR}/build/**", useDefaultExcludes: false + stash allowEmpty: true, name: 'build-oss', excludes: "${BASE_DIR}/.git,node/**", useDefaultExcludes: false } } +/** + build the Kibana No OSS. +*/ def buildNoOSSSteps(){ - withEnvWrapper() { - unstash 'source' - unstash 'cache' - nodeEnviromentVars("${NODE_VERSION}") + useCache('build-no-oss'){ + checkoutKibana() dir("${BASE_DIR}"){ sh '''#!/bin/bash set -euxo pipefail node scripts/build --debug --no-oss --skip-os-packages ''' + } + stash allowEmpty: true, name: 'build-no-oss', excludes: "${BASE_DIR}/.git,node/**", useDefaultExcludes: false + } + + useCache('kibana-bin'){ + dir("${BASE_DIR}"){ sh '''#!/bin/bash set -euxo pipefail linuxBuild="$(find "./target" -name 'kibana-*-linux-x86_64.tar.gz')" @@ -106,117 +349,89 @@ def buildNoOSSSteps(){ ''' } stash allowEmpty: true, name: 'kibana-bin', includes: "install/kibana/**", useDefaultExcludes: false - stash allowEmpty: true, name: 'build-no-oss', includes: "${BASE_DIR}/build/**", useDefaultExcludes: false + } +} + +/** + Some quick Test to run before anything else. 
+*/ +def quickTest(){ + dir("${BASE_DIR}"){ + sh 'yarn tslint ~/elastic/kibana/x-pack/plugins/apm/**/*.{ts,tsx} --fix' + sh 'cd x-pack/plugins/apm && yarn tsc --noEmit' + sh 'cd x-pack && node ./scripts/jest.js apm' } } def kibanaIntakeSteps(){ - withEnvWrapper() { - unstash 'source' - unstash 'cache' - nodeEnviromentVars("${NODE_VERSION}") - dir("${BASE_DIR}"){ - sh '''#!/bin/bash - set -euxo pipefail - PATH=${PATH}:$(yarn bin) - yarn kbn bootstrap - grunt jenkins:unit --from=source --dev || echo -e "\033[31;49mTests FAILED\033[0m" - ''' - } + checkoutKibana() + dir("${BASE_DIR}"){ + sh '''#!/bin/bash + set -euxo pipefail + grunt jenkins:unit --from=source --dev || echo -e "\033[31;49mTests FAILED\033[0m" + ''' } } def kibanaGroupSteps(){ - withEnvWrapper() { - unstash 'source' - unstash 'cache' - unstash 'build-oss' - nodeEnviromentVars("${NODE_VERSION}") - dir("${BASE_DIR}"){ - script { - def parallelSteps = Map [:] - def groups = (1..12) - sh '''#!/bin/bash - set -euxo pipefail - PATH=${PATH}:$(yarn bin) - yarn kbn bootstrap''' - - parallelSteps['ensureAllTestsInCiGroup'] = {sh '''#!/bin/bash - set -euxo pipefail - PATH=${PATH}:$(yarn bin) - - grunt functionalTests:ensureAllTestsInCiGroup || echo -e "\033[31;49mTests FAILED\033[0m" - '''} - - parallelSteps['pluginFunctionalTestsRelease'] = {sh '''#!/bin/bash - set -euxo pipefail - PATH=${PATH}:$(yarn bin) - - grunt run:pluginFunctionalTestsRelease --from=source || echo -e "\033[31;49mTests FAILED\033[0m" - '''} - - groups.each{ group -> - parallelSteps["functionalTests_ciGroup${group}"] ={sh """#!/bin/bash - set -euxo pipefail - PATH=\${PATH}:\$(yarn bin) - - grunt "run:functionalTests_ciGroup${group}" --from=source || echo -e "\033[31;49mTests FAILED\033[0m" - """} - } - parallel(parallelSteps) - } + buildOSSSteps() + checkoutES() + dir("${BASE_DIR}"){ + def parallelSteps = [:] + def groups = (1..12) + sh '''#!/bin/bash + set -euxo pipefail + grunt functionalTests:ensureAllTestsInCiGroup || echo -e "\033[31;49mTests FAILED\033[0m" + ''' + + parallelSteps['pluginFunctionalTestsRelease'] = {sh '''#!/bin/bash + set -euxo pipefail + grunt run:pluginFunctionalTestsRelease --from=source || echo -e "\033[31;49mTests FAILED\033[0m" + '''} + + groups.each{ group -> + parallelSteps["functionalTests_ciGroup${group}"] ={sh """#!/bin/bash + set -euxo pipefail + grunt "run:functionalTests_ciGroup${group}" --from=source || echo -e "\033[31;49mTests FAILED\033[0m" + """} } + parallel(parallelSteps) } } def xPackIntakeSteps(){ - withEnvWrapper() { - unstash 'source' - unstash 'cache' - nodeEnviromentVars("${NODE_VERSION}") - dir("${XPACK_DIR}"){ - script { - def parallelSteps = Map [:] - - parallelSteps['Mocha tests'] = {sh '''#!/bin/bash - set -euxo pipefail - yarn kbn bootstrap - yarn test'''} - parallelSteps['Jest tests'] = {sh '''#!/bin/bash - set -euxo pipefail - node scripts/jest --ci --no-cache --verbose'''} - parallel(parallelSteps) - } - } + checkoutKibana() + dir("${XPACK_DIR}"){ + def parallelSteps = [:] + parallelSteps['Mocha tests'] = {sh '''#!/bin/bash + set -euxo pipefail + yarn test'''} + parallelSteps['Jest tests'] = {sh '''#!/bin/bash + set -euxo pipefail + node scripts/jest --ci --no-cache --verbose'''} + parallel(parallelSteps) } } def xPackGroupSteps(){ - withEnvWrapper() { - unstash 'source' - unstash 'cache' - unstash 'build-no-oss' - nodeEnviromentVars("${NODE_VERSION}") - dir("${XPACK_DIR}"){ - script { - def parallelSteps = Map [:] - def groups = (1..6) - def funTestGroups = (1..12) - - groups.each{ group -> - 
parallelSteps["ciGroup${group}"] = {sh """#!/bin/bash - set -euxo pipefail - node scripts/functional_tests --assert-none-excluded --include-tag "ciGroup${group}" - """} - } - funTestGroups.each{ group -> - parallelSteps["functional and api tests ciGroup${group}"] = {sh """#!/bin/bash - set -euxo pipefail - node scripts/functional_tests --debug --bail --kibana-install-dir "${INSTALL_DIR}" --include-tag "ciGroup${group}" - """} - } - parallel(parallelSteps) - } + buildNoOSSSteps() + dir("${XPACK_DIR}"){ + def parallelSteps = [:] + def groups = (1..6) + def funTestGroups = (1..12) + + groups.each{ group -> + parallelSteps["ciGroup${group}"] = {sh """#!/bin/bash + set -euxo pipefail + node scripts/functional_tests --assert-none-excluded --include-tag "ciGroup${group}" + """} } + funTestGroups.each{ group -> + parallelSteps["functional and api tests ciGroup${group}"] = {sh """#!/bin/bash + set -euxo pipefail + node scripts/functional_tests --debug --bail --kibana-install-dir "${INSTALL_DIR}" --include-tag "ciGroup${group}" + """} + } + parallel(parallelSteps) } -} \ No newline at end of file +} diff --git a/vars/runPipeline.groovy b/vars/runPipeline.groovy deleted file mode 100644 index f6ddef673..000000000 --- a/vars/runPipeline.groovy +++ /dev/null @@ -1,190 +0,0 @@ -#!/usr/bin/env groovy - -/** - Run a pipeline passed as parameter. - - There is a limitation, the main pipeline should be definned in the call function. - https://jenkins.io/doc/book/pipeline/shared-libraries/#defining-declarative-pipelines -*/ -void call(Map args = [:]){ - def name = args.containsKey('name') ? args.name : 'default' - switch (name) { - case 'apm-ui': - pipeline { - agent { label 'linux && immutable' } - environment { - BASE_DIR="src/github.com/elastic/kibana" - ES_BASE_DIR="src/github.com/elastic/elasticsearch" - JOB_GIT_CREDENTIALS = "f6c7695a-671e-4f4f-a331-acdce44ff9ba" - FORCE_COLOR = "2" - } - options { - timeout(time: 1, unit: 'HOURS') - buildDiscarder(logRotator(numToKeepStr: '3', artifactNumToKeepStr: '2', daysToKeepStr: '30')) - timestamps() - preserveStashes() - ansiColor('xterm') - disableResume() - durabilityHint('PERFORMANCE_OPTIMIZED') - } - parameters { - string(name: 'GIT_URL', defaultValue: "https://github.com/elastic/kibana.git", description: "Repo") - string(name: 'ES_GIT_URL', defaultValue: "https://github.com/elastic/elasticsearch.git", description: "Repo") - string(name: 'branch_specifier', defaultValue: "master", description: "the Git branch specifier to build (branchName, tagName, commitId, etc.)") - string(name: 'TEST_BROWSER_HEADLESS', defaultValue: "1", description: "Use headless browser.") - string(name: 'TEST_ES_FROM', defaultValue: "source", description: "Test from sources.") - booleanParam(name: 'Run_As_Master_Branch', defaultValue: false, description: 'Allow to run any steps on a PR, some steps normally only run on master branch.') - booleanParam(name: 'test_ci', defaultValue: true, description: 'Enable test') - booleanParam(name: 'build_oss_ci', defaultValue: false, description: 'Build OSS') - booleanParam(name: 'build_no_oss_ci', defaultValue: false, description: 'Build NO OSS') - booleanParam(name: 'intake_ci', defaultValue: false, description: 'Intake Tests') - booleanParam(name: 'ciGroup_ci', defaultValue: false, description: 'Group Tests') - booleanParam(name: 'x_pack_intake_ci', defaultValue: false, description: 'X-Pack intake Tests') - booleanParam(name: 'x_pack_ciGroup_ci', defaultValue: false, description: 'X-Pack Group Tests') - } - stages { - /** - Checkout the code and 
stash it, to use it on other stages. - */ - stage('Initializing') { - agent { label 'linux && immutable' } - environment { - HOME = "${env.WORKSPACE}" - } - steps { - script { pipelineApmUI.checkoutSteps() } - } - } - stage('build'){ - failFast true - parallel { - /** - Build on a linux environment. - */ - stage('build oss') { - agent { label 'linux && immutable' } - when { - beforeAgent true - environment name: 'build_oss_ci', value: 'true' - } - steps { - script { pipelineApmUI.buildOSSSteps() } - } - } - /** - Building and extracting default Kibana distributable for use in functional tests - */ - stage('build no-oss') { - agent { label 'linux && immutable' } - when { - beforeAgent true - environment name: 'build_no_oss_ci', value: 'true' - } - steps { - script { pipelineApmUI.buildNoOSSSteps() } - } - } - } - } - /** - Test on a linux environment. - */ - stage('kibana-intake') { - when { - beforeAgent true - environment name: 'intake_ci', value: 'true' - } - steps { - script { pipelineApmUI.kibanaIntakeSteps() } - } - post { always { grabTestResults() } } - } - /** - Test ciGroup tests on a linux environment. - */ - stage('kibana-ciGroup') { - when { - beforeAgent true - environment name: 'ciGroup_ci', value: 'true' - } - steps { - script { pipelineApmUI.kibanaGroupSteps() } - } - post { always { grabTestResults() } } - } - /** - Test x-pack-intake tests on a linux environment. - */ - stage('x-pack-intake') { - environment { - XPACK_DIR = "${env.WORKSPACE}/${env.BASE_DIR}/x-pack" - } - when { - beforeAgent true - environment name: 'x_pack_intake_ci', value: 'true' - } - steps { - script { pipelineApmUI.xPackIntakeSteps() } - } - post { always { grabTestResults() } } - } - /** - Test x-pack-ciGroup tests on a linux environment. - */ - stage('x-pack-ciGroup') { - environment { - XPACK_DIR = "${env.WORKSPACE}/${env.BASE_DIR}/x-pack" - INSTALL_DIR = "${env.WORKSPACE}/install/kibana" - } - when { - beforeAgent true - environment name: 'x_pack_ciGroup_ci', value: 'true' - } - steps { - script { pipelineApmUI.xPackGroupSteps() } - } - post { always { grabTestResults() } } - } - } - post { - success { - echoColor(text: '[SUCCESS]', colorfg: 'green', colorbg: 'default') - } - aborted { - echoColor(text: '[ABORTED]', colorfg: 'magenta', colorbg: 'default') - } - failure { - echoColor(text: '[FAILURE]', colorfg: 'red', colorbg: 'default') - //step([$class: 'Mailer', notifyEveryUnstableBuild: true, recipients: "${NOTIFY_TO}", sendToIndividuals: false]) - } - unstable { - echoColor(text: '[UNSTABLE]', colorfg: 'yellow', colorbg: 'default') - } - } - } - break - case 'test': - pipeline { - agent { label 'linux' } - stages { - stage('Hello'){ - steps { - echo "Hello, I am Test pipeline" - } - } - } - } - break - default: - pipeline { - agent { label 'linux' } - stages { - stage('Hello'){ - steps { - echo "Hello, I am pipeline" - } - } - } - } - } -} diff --git a/vars/runPipeline.txt b/vars/runPipeline.txt deleted file mode 100644 index 81a3b734f..000000000 --- a/vars/runPipeline.txt +++ /dev/null @@ -1,7 +0,0 @@ -Run a pipeline passed as parameter. - -``` -runPipeline(name: 'pipeline-name') -``` - -* name: the pipeline name to execute. \ No newline at end of file diff --git a/vars/withEnvWrapper.groovy b/vars/withEnvWrapper.groovy index 125955279..44ae4f014 100644 --- a/vars/withEnvWrapper.groovy +++ b/vars/withEnvWrapper.groovy @@ -7,7 +7,10 @@ //block } */ -def call(Closure body) { +def call(Map params = [:], Closure body) { + def cleanAfter = params.containsKey('cleanAfter') ? 
params.cleanAfter : false + def cleanBefore = params.containsKey('cleanBefore') ? params.cleanBefore : true + def baseDir = params.containsKey('baseDir') ? params.baseDir : '.' wrap([$class: 'MaskPasswordsBuildWrapper', varPasswordPairs: [ [var: 'JOB_GCS_CREDENTIALS', password: 'apm-ci-gcs-plugin'], @@ -16,13 +19,22 @@ def call(Closure body) { ], varMaskRegexes: [[regex: 'http(s)?\\:\\/+(.*)\\.elastic\\.co']] ]) { - deleteDir() + cleanWS(cleanBefore) withEnv([ "JOB_GCS_CREDENTIALS=apm-ci-gcs-plugin", "JOB_GCS_BUCKET=apm-ci-artifacts/jobs", "NOTIFY_TO=infra-root+build@elastic.co" ]){ + dir(baseDir){ body() } + } + cleanWS(cleanAfter) + } +} + +def cleanWS(condition){ + if(condition){ + deleteDir() } } \ No newline at end of file diff --git a/vars/withEnvWrapper.txt b/vars/withEnvWrapper.txt index 6b9ae756d..3ce1899a5 100644 --- a/vars/withEnvWrapper.txt +++ b/vars/withEnvWrapper.txt @@ -6,6 +6,16 @@ withEnvWrapper(){ } ``` +``` +withEnvWrapper(cleanBefore: true, cleanAfter: true, baseDir: 'src'){ + //block +} +``` + +* cleanBefore: clean the workspace before execute the code block. +* cleanAfter: clean the workspace after execute the code block. +* baseDir: directory to work into, if does not exists would be created. + *TODO* replace each variable with a secret text credential type, then use withCredentials step. ```
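
For reference, the control flow that the new `withEnvWrapper` options introduce boils down to the following simplified sketch (password masking and the exact `withEnv` variables from `vars/withEnvWrapper.groovy` are omitted; the defaults shown are the ones defined in the diff above):

```groovy
// Simplified flow of withEnvWrapper(cleanBefore: ..., cleanAfter: ..., baseDir: ...) { body }
def call(Map params = [:], Closure body) {
  def cleanAfter  = params.containsKey('cleanAfter')  ? params.cleanAfter  : false // off unless requested
  def cleanBefore = params.containsKey('cleanBefore') ? params.cleanBefore : true  // wipe workspace first by default
  def baseDir     = params.containsKey('baseDir')     ? params.baseDir     : '.'   // dir() creates it if missing

  if (cleanBefore) { deleteDir() }  // cleanWS(cleanBefore)
  dir(baseDir) {                    // the block runs inside baseDir
    body()
  }
  if (cleanAfter) { deleteDir() }   // cleanWS(cleanAfter), runs back at the workspace root
}
```

Note that the after-clean happens outside `dir(baseDir)`, so it deletes the whole workspace, not just the base directory — the same ordering the real step uses.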