Merge branch 'master' into text-halo
elasticmachine authored Jan 10, 2020
2 parents 3ea01da + 0951faa commit 66a5571
Showing 1,981 changed files with 45,014 additions and 20,513 deletions.
112 changes: 112 additions & 0 deletions .ci/Jenkinsfile_coverage
@@ -0,0 +1,112 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load() // load from the Jenkins instance

stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
timeout(time: 180, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
catchError {
withEnv([
'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
]) {
parallel([
'kibana-intake-agent': {
withEnv([
'NODE_ENV=test' // Needed for jest tests only
]) {
kibanaPipeline.legacyJobRunner('kibana-intake')()
}
},
'x-pack-intake-agent': {
withEnv([
'NODE_ENV=test' // Needed for jest tests only
]) {
kibanaPipeline.legacyJobRunner('x-pack-intake')()
}
},
'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
]),
'kibana-xpack-agent-1': kibanaPipeline.withWorkers('kibana-xpack-tests-1', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
]),
'kibana-xpack-agent-2': kibanaPipeline.withWorkers('kibana-xpack-tests-2', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
]),

'kibana-xpack-agent-3': kibanaPipeline.withWorkers('kibana-xpack-tests-3', { kibanaPipeline.buildXpack() }, [
'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
]),
])
kibanaPipeline.jobRunner('tests-l', false) {
kibanaPipeline.downloadCoverageArtifacts()
kibanaPipeline.bash(
'''
# bootstrap from x-pack folder
source src/dev/ci_setup/setup_env.sh
cd x-pack
yarn kbn bootstrap --prefer-offline
cd ..
# extract archives
mkdir -p /tmp/extracted_coverage
echo extracting intakes
tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-oss-tests
tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
echo extracting kibana-xpack-tests
for i in {1..3}; do
tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests-${i}/kibana-coverage.tar.gz -C /tmp/extracted_coverage
done
# replace path in json files to have valid html report
pwd=$(pwd)
du -sh /tmp/extracted_coverage/target/kibana-coverage/
echo replacing path in json files
for i in {1..9}; do
sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
done
wait
# merge oss & x-pack reports
echo merging coverage reports
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
echo copy mocha reports
mkdir -p target/kibana-coverage/mocha-combined
cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
''',
"run `yarn kbn bootstrap && merge coverage`"
)
sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
}
}
}
kibanaPipeline.sendMail()
}
}
}
}
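
A note on the sed step in the script above: istanbul/nyc coverage JSON keys each entry by the absolute path of the instrumented file, and the worker jobs run out of /dev/shm/workspace/kibana, so the merged HTML report can only link back to sources after those prefixes are rewritten to the local checkout. A minimal sketch of the rewrite in plain Groovy (both paths are illustrative, not taken from a real report):

// Illustrative only: how one coverage-map key changes after the sed rewrite above.
// The real replacement target is whatever $(pwd) resolves to on the merge agent.
def before = '/dev/shm/workspace/kibana/src/core/public/plugins/plugin.ts'
def after = before.replace('/dev/shm/workspace/kibana', '/var/lib/jenkins/workspace/kibana-coverage')
assert after == '/var/lib/jenkins/workspace/kibana-coverage/src/core/public/plugins/plugin.ts'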
2 changes: 1 addition & 1 deletion .ci/Jenkinsfile_flaky
@@ -99,7 +99,7 @@ def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWor
   def numberOfWorkers = Math.min(numberOfExecutions, maxWorkerProcesses)

   for(def i = 1; i <= numberOfWorkers; i++) {
-    def workerExecutions = numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0)
+    def workerExecutions = floor(numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0))

     workerMap["agent-${agentNumber}-worker-${i}"] = { workerNumber ->
       for(def j = 0; j < workerExecutions; j++) {
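
For context on the floor() change above: Groovy's / on two integers performs decimal (BigDecimal) division, so numberOfExecutions/numberOfWorkers can produce a fractional loop bound, and each worker would run one extra execution whenever the division isn't exact. A quick sketch of the behaviour in plain Groovy, with illustrative values (Math.floor stands in for whichever floor helper the Jenkinsfile actually resolves to):

def numberOfExecutions = 7
def numberOfWorkers = 3
def raw = numberOfExecutions / numberOfWorkers   // 2.333..., a BigDecimal, not 2
def count = 0
for (def j = 0; j < raw; j++) { count++ }
assert count == 3                 // one extra iteration without flooring
assert Math.floor(raw) == 2       // flooring restores the intended per-worker count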
162 changes: 162 additions & 0 deletions .ci/es-snapshots/Jenkinsfile_build_es
@@ -0,0 +1,162 @@
#!/bin/groovy

// This job effectively has two SCM configurations:
// one for kibana, used to check out this Jenkinsfile (which makes it the job's main SCM configuration) and to kick off the downstream verification job
// one for elasticsearch, used to check out the elasticsearch source before building it

// There are two parameters that drive which branch is checked out for each of these, but they will typically be the same:
// 'branch_specifier' is for kibana / the job itself
// ES_BRANCH is for elasticsearch

library 'kibana-pipeline-library'
kibanaLibrary.load()

def ES_BRANCH = params.ES_BRANCH

if (!ES_BRANCH) {
error "Parameter 'ES_BRANCH' must be specified."
}

currentBuild.displayName += " - ${ES_BRANCH}"
currentBuild.description = "ES: ${ES_BRANCH}<br />Kibana: ${params.branch_specifier}"

def PROMOTE_WITHOUT_VERIFY = !!params.PROMOTE_WITHOUT_VERIFICATION

timeout(time: 120, unit: 'MINUTES') {
  timestamps {
    ansiColor('xterm') {
      node('linux && immutable') {
        catchError {
          def VERSION
          def SNAPSHOT_ID
          def DESTINATION

          def scmVars = checkoutEs(ES_BRANCH)
          def GIT_COMMIT = scmVars.GIT_COMMIT
          def GIT_COMMIT_SHORT = sh(script: "git rev-parse --short ${GIT_COMMIT}", returnStdout: true).trim()

          buildArchives('to-archive')

          dir('to-archive') {
            def now = new Date()
            def date = now.format("yyyyMMdd-HHmmss")

            def filesRaw = sh(script: "ls -1", returnStdout: true).trim()
            def files = filesRaw
              .split("\n")
              .collect { filename ->
                // Filename examples
                // elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
                // elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
                def parts = filename.replace("elasticsearch-oss", "oss").split("-")

                VERSION = VERSION ?: parts[1]
                SNAPSHOT_ID = SNAPSHOT_ID ?: "${date}_${GIT_COMMIT_SHORT}"
                DESTINATION = DESTINATION ?: "${VERSION}/archives/${SNAPSHOT_ID}"

                return [
                  filename: filename,
                  checksum: filename + '.sha512',
                  url: "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${DESTINATION}/${filename}".toString(),
                  version: parts[1],
                  platform: parts[3],
                  architecture: parts[4].split('\\.')[0],
                  license: parts[0] == 'oss' ? 'oss' : 'default',
                ]
              }

            sh 'find * -exec bash -c "shasum -a 512 {} > {}.sha512" \\;'

            def manifest = [
              bucket: "kibana-ci-es-snapshots-daily/${DESTINATION}".toString(),
              branch: ES_BRANCH,
              sha: GIT_COMMIT,
              sha_short: GIT_COMMIT_SHORT,
              version: VERSION,
              generated: now.format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone("UTC")),
              archives: files,
            ]
            def manifestJson = toJSON(manifest).toString()
            writeFile file: 'manifest.json', text: manifestJson

            upload(DESTINATION, '*.*')

            sh "cp manifest.json manifest-latest.json"
            upload(VERSION, 'manifest-latest.json')
          }

          if (PROMOTE_WITHOUT_VERIFY) {
            esSnapshots.promote(VERSION, SNAPSHOT_ID)

            emailext(
              to: 'build-kibana@elastic.co',
              subject: "ES snapshot promoted without verification: ${params.ES_BRANCH}",
              body: '${SCRIPT,template="groovy-html.template"}',
              mimeType: 'text/html',
            )
          } else {
            build(
              propagate: false,
              wait: false,
              job: 'elasticsearch+snapshots+verify',
              parameters: [
                string(name: 'branch_specifier', value: branch_specifier),
                string(name: 'SNAPSHOT_VERSION', value: VERSION),
                string(name: 'SNAPSHOT_ID', value: SNAPSHOT_ID),
              ]
            )
          }
        }

        kibanaPipeline.sendMail()
      }
    }
  }
}
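
To make the collect step above concrete, here is a trace of how one of the archive filenames from the comments maps onto the manifest fields; everything follows from the string operations in the code, and only the date/commit portion of SNAPSHOT_ID is illustrative:

def filename = 'elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz'
def parts = filename.replace('elasticsearch-oss', 'oss').split('-')
// parts: ['oss', '8.0.0', 'SNAPSHOT', 'linux', 'x86_64.tar.gz']
assert parts[1] == '8.0.0'                              // VERSION
assert parts[3] == 'linux'                              // platform
assert parts[4].split('\\.')[0] == 'x86_64'             // architecture
assert (parts[0] == 'oss' ? 'oss' : 'default') == 'oss' // license
// With an illustrative SNAPSHOT_ID of '20200110-120000_0951faa' (yyyyMMdd-HHmmss_<short sha>),
// DESTINATION becomes '8.0.0/archives/20200110-120000_0951faa', the GCS prefix used by upload().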

def checkoutEs(branch) {
  retryWithDelay(8, 15) {
    return checkout([
      $class: 'GitSCM',
      branches: [[name: branch]],
      doGenerateSubmoduleConfigurations: false,
      extensions: [],
      submoduleCfg: [],
      userRemoteConfigs: [[
        credentialsId: 'f6c7695a-671e-4f4f-a331-acdce44ff9ba',
        url: 'git@github.com:elastic/elasticsearch',
      ]],
    ])
  }
}

def upload(destination, pattern) {
  return googleStorageUpload(
    credentialsId: 'kibana-ci-gcs-plugin',
    bucket: "gs://kibana-ci-es-snapshots-daily/${destination}",
    pattern: pattern,
    sharedPublicly: false,
    showInline: false,
  )
}

def buildArchives(destination) {
  def props = readProperties file: '.ci/java-versions.properties'
  withEnv([
    // Select the correct JDK for this branch
    "PATH=/var/lib/jenkins/.java/${props.ES_BUILD_JAVA}/bin:${env.PATH}",

    // These Jenkins env vars trigger some automation in the elasticsearch repo that we don't want
    "BUILD_NUMBER=",
    "JENKINS_URL=",
    "BUILD_URL=",
    "JOB_NAME=",
    "NODE_NAME=",
  ]) {
    sh """
      ./gradlew -p distribution/archives assemble --parallel
      mkdir -p ${destination}
      find distribution/archives -type f \\( -name 'elasticsearch-*-*-*-*.tar.gz' -o -name 'elasticsearch-*-*-*-*.zip' \\) -not -path *no-jdk* -exec cp {} ${destination} \\;
    """
  }
}
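
One detail of buildArchives() worth spelling out: withEnv entries of the form "NAME=" do not unset the variable, they override it with an empty value for the nested block, which is enough to avoid triggering the elasticsearch repo automation mentioned in the comment. A minimal sketch of that behaviour (the echoed output is what I would expect, not copied from a build log):

withEnv(['BUILD_NUMBER=']) {
  // Single-quoted sh step, so ${BUILD_NUMBER} is expanded by the shell, not by Groovy.
  sh 'echo "BUILD_NUMBER is [${BUILD_NUMBER}]"'   // expected output: BUILD_NUMBER is []
}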
19 changes: 19 additions & 0 deletions .ci/es-snapshots/Jenkinsfile_trigger_build_es
@@ -0,0 +1,19 @@
#!/bin/groovy

if (!params.branches_yaml) {
  error "'branches_yaml' parameter must be specified"
}

def branches = readYaml text: params.branches_yaml

branches.each { branch ->
  build(
    propagate: false,
    wait: false,
    job: 'elasticsearch+snapshots+build',
    parameters: [
      string(name: 'branch_specifier', value: branch),
      string(name: 'ES_BRANCH', value: branch),
    ]
  )
}
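
For reference, branches_yaml is expected to carry a YAML list of branch names; readYaml (from the Pipeline Utility Steps plugin) turns it into a plain Groovy list, so each entry fans out into one elasticsearch+snapshots+build run. A sketch with illustrative branch names:

def branchesYaml = '''
- master
- "7.x"
- "7.6"
'''
def branches = readYaml text: branchesYaml
assert branches == ['master', '7.x', '7.6']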
72 changes: 72 additions & 0 deletions .ci/es-snapshots/Jenkinsfile_verify_es
@@ -0,0 +1,72 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load()

def SNAPSHOT_VERSION = params.SNAPSHOT_VERSION
def SNAPSHOT_ID = params.SNAPSHOT_ID

if (!SNAPSHOT_VERSION) {
  error "Parameter SNAPSHOT_VERSION must be specified"
}

if (!SNAPSHOT_ID) {
  error "Parameter SNAPSHOT_ID must be specified"
}

currentBuild.displayName += " - ${SNAPSHOT_VERSION}"
currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch_specifier}"

def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"

timeout(time: 120, unit: 'MINUTES') {
  timestamps {
    ansiColor('xterm') {
      catchError {
        withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
          parallel([
            // TODO we just need to run integration tests from intake?
            'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'),
            'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'),
            'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
              'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
              'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
              'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
              'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
              'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
              'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
              'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
              'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
              'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
              'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
              'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
              'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
            ]),
            'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
              'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
              'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
              'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
              'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
              'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
              'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
              'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
              'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
              'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
              'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
            ]),
          ])
        }

        promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
      }

      kibanaPipeline.sendMail()
    }
  }
}

def promoteSnapshot(snapshotVersion, snapshotId) {
  node('linux && immutable') {
    esSnapshots.promote(snapshotVersion, snapshotId)
  }
}
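
Tying this back to Jenkinsfile_build_es above: that job names snapshots "${date}_${GIT_COMMIT_SHORT}" and uploads them under "${VERSION}/archives/${SNAPSHOT_ID}", so the SNAPSHOT_MANIFEST URL built here points at the manifest.json it produced. A quick trace with illustrative parameter values:

def SNAPSHOT_VERSION = '8.0.0'              // illustrative
def SNAPSHOT_ID = '20200110-120000_0951faa' // illustrative: yyyyMMdd-HHmmss_<short sha>
def manifest = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"
assert manifest == 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily/8.0.0/archives/20200110-120000_0951faa/manifest.json'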
(Diff truncated: the remaining changed files are not shown here.)
