diff --git a/Pipeline/CreateUCDComponentVersion/README.md b/Pipeline/CreateUCDComponentVersion/README.md index 76907b4e..96a9c7dd 100644 --- a/Pipeline/CreateUCDComponentVersion/README.md +++ b/Pipeline/CreateUCDComponentVersion/README.md @@ -4,10 +4,12 @@ An important step in the pipeline is to generate a deployable package. This sample Groovy script: -- Extracts information about the build outputs from the Dependency Based Build (DBB) `BuildReport.json`. The script is able to take a single DBB build report or multiple build reports to build a cumulative package across multiple incremental builds. +- Extracts information about the build outputs from the Dependency Based Build (DBB) `BuildReport.json`. The script is able to take a single DBB build report or multiple build reports to build a cumulative package across multiple incremental builds. - Generates the UrbanCode Deploy (UCD) shiplist `shiplist.xml` file. - Invokes the `buztool.sh` with the appropriate configuration to store the binary package either in UCD packaging format v1 or v2 in the artifact repository and to register a new UCD component version. To use UCD packaging format v2, pass the CLI option `--ucdV2PackageFormat`. +The support for zFS files in the packaging process is performed through the use of a USS_RECORD type record in the DBB BuildReport. + ## High-level Processing Flow This section provides a more detailed explanation of how the CreateUCDComponentVersion script works and what it does. 
diff --git a/Pipeline/CreateUCDComponentVersion/dbb-ucd-packaging.groovy b/Pipeline/CreateUCDComponentVersion/dbb-ucd-packaging.groovy index 52c44c7c..f3d82fa3 100644 --- a/Pipeline/CreateUCDComponentVersion/dbb-ucd-packaging.groovy +++ b/Pipeline/CreateUCDComponentVersion/dbb-ucd-packaging.groovy @@ -10,6 +10,7 @@ import groovy.json.JsonParserType import groovy.json.JsonBuilder import groovy.json.JsonSlurper import com.ibm.jzos.ZFile; +import java.nio.file.* /** * This script creates a version in UrbanCode Deploy based on the build result. @@ -75,8 +76,8 @@ def startTime = new Date() properties.startTime = startTime.format("yyyyMMdd.hhmmss.mmm") println("** Create version start at $properties.startTime") println("** Properties at startup:") -properties.each{k,v-> - println " $k -> $v" +properties.each { k,v-> + println " $k -> $v" } /* @@ -95,78 +96,122 @@ println "* Buildrecord type TYPE_COPY_TO_PDS is supported with DBB toolkit 1.0.8 println("** Reading provided build report(s).") def buildReportRank = 1 -properties.buildReportOrder.each{ buildReportFile -> - println("*** Parsing DBB build report $buildReportFile.") - - def buildReport = BuildReport.parse(new FileInputStream(buildReportFile)) - - def executes = buildReport.getRecords().findAll{ - try { - (it.getType()==DefaultRecordFactory.TYPE_EXECUTE || it.getType()==DefaultRecordFactory.TYPE_COPY_TO_PDS) && - !it.getOutputs().isEmpty() - } catch (Exception e) {} - } - - //removes all outputs of deploytype ZUNIT-TESTCASE or null - executes.each { - def unwantedOutputs = it.getOutputs().findAll{ o -> - o.deployType == null || o.deployType == 'ZUNIT-TESTCASE' - } - it.getOutputs().removeAll(unwantedOutputs) - } - - - def deletions = buildReport.getRecords().findAll{ - try { - // Obtain delete records, which got added by zAppBuild - it.getType()=="DELETE_RECORD" - } catch (Exception e){ - println e - } - } - - def count = 0 - def deletionCount = 0 - - // store build output information in Hashmap buildOutputsMap 
to replace potential duplicates - // managing TYPE EXECUTE and COPY_TO_PDS - executes.each{ executeRecord -> - if(executeRecord.getOutputs().isEmpty() != true) { - count += executeRecord.getOutputs().size() - executeRecord.getOutputs().each{ output -> - def (ds,member) = getDatasetName(output.dataset) - tempBuildOutputsMap.put(new DeployableArtifact(member, output.deployType) , [ds, buildReport, executeRecord, buildReportRank]) - } - } - } - - // store build output information in Hashmap buildOutputsMap to replace potential duplicates - // managing DELETE_RECORD leveraging the generic AnyTypeRecord Type - deletions.each { deleteRecord -> - deletionCount += deleteRecord.getAttributeAsList("deletedBuildOutputs").size() - deleteRecord.getAttributeAsList("deletedBuildOutputs").each{ deletedFile -> -// def (ds,member) = getDatasetName(deletedFile) - String cleansedDeletedFile = ((String) deletedFile).replace('"', ''); - tempBuildOutputsMap.put(new DeployableArtifact(cleansedDeletedFile, "DELETE") , [deletedFile, buildReport, deleteRecord, buildReportRank]) - } - } - - if ( count + deletionCount == 0 ) { - println("** No items to package in $buildReportFile.") - } else { - // Log deployable files - if (count != 0) { - println("** Deployable files detected in $buildReportFile") - executes.each { it.getOutputs().each { println(" ${it.dataset}, ${it.deployType}")}} - } - - // Log deleted files - if (deletionCount != 0) { - println("** Deleted files detected in $buildReportFile") - deletions.each { it.getAttributeAsList("deletedBuildOutputs").each { println(" ${it}")}} - } - } - buildReportRank++ +properties.buildReportOrder.each { buildReportFile -> + println("*** Parsing DBB build report $buildReportFile.") + + def buildReport = BuildReport.parse(new FileInputStream(buildReportFile)) + + def executesRecords = buildReport.getRecords().findAll { + try { + (it.getType()==DefaultRecordFactory.TYPE_EXECUTE || it.getType()==DefaultRecordFactory.TYPE_COPY_TO_PDS) && + 
!it.getOutputs().isEmpty() + } catch (Exception e) {} + } + + //removes all outputs of deploytype ZUNIT-TESTCASE or null + executesRecords.each { + def unwantedOutputs = it.getOutputs().findAll { o -> + o.deployType == null || o.deployType == 'ZUNIT-TESTCASE' + } + it.getOutputs().removeAll(unwantedOutputs) + } + + + def deletions = buildReport.getRecords().findAll { + try { + // Obtain delete records, which got added by zAppBuild + it.getType()=="DELETE_RECORD" + } catch (Exception e) { + println e + } + } + + def ussRecords = buildReport.getRecords().findAll { + try { + it.getType()=="USS_RECORD" + } catch (Exception e) {} + } + + def datasetMembersCount = 0 + def zFSFilesCount = 0 + def deletionCount = 0 + + // store build output information in Hashmap buildOutputsMap to replace potential duplicates + // managing TYPE EXECUTE and COPY_TO_PDS + executesRecords.each { executeRecord -> + if(executeRecord.getOutputs().isEmpty() != true) { + datasetMembersCount += executeRecord.getOutputs().size() + executeRecord.getOutputs().each { output -> + def (ds,member) = getDatasetAndMember(output.dataset) + tempBuildOutputsMap.put(new DeployableArtifact(member, output.deployType) , [ds, buildReport, executeRecord, buildReportRank]) + } + } + } + + // store build output information in Hashmap buildOutputsMap to replace potential duplicates + // managing TYPE USS_RECORD + // Today the USS_RECORD type is built using an AnyTypeRecord record + // An Idea is currently opened to have an official USS_RECORD: https://ideas.ibm.com/ideas/DBB-I-43 + ussRecords.each { ussRecord -> + println(ussRecord.getAttribute("label")) + ArrayList outputs = [] + ussRecord.getAttribute("outputs").split(';').collectEntries { entry -> + outputs += entry.replaceAll('\\[|\\]', '').split(',') + } + + if (outputs.size() > 0) { + zFSFilesCount += outputs.size() + outputs.each { output -> + rootDir = output[0].trim() + file = output[1].trim() + deployType = output[2].trim() + tempBuildOutputsMap.put(new 
DeployableArtifact(file, deployType) , [rootDir, buildReport, ussRecord, buildReportRank]) + } + } + } + + + // store build output information in Hashmap buildOutputsMap to replace potential duplicates + // managing DELETE_RECORD leveraging the generic AnyTypeRecord Type + deletions.each { deleteRecord -> + deletionCount += deleteRecord.getAttributeAsList("deletedBuildOutputs").size() + deleteRecord.getAttributeAsList("deletedBuildOutputs").each{ deletedFile -> + String cleansedDeletedFile = ((String) deletedFile).replace('"', ''); + tempBuildOutputsMap.put(new DeployableArtifact(cleansedDeletedFile, "DELETE") , [deletedFile, buildReport, deleteRecord, buildReportRank]) + } + } + + if (datasetMembersCount + zFSFilesCount + deletionCount == 0) { + println("** No items to package in $buildReportFile.") + } else { + // Log deployable files + if (datasetMembersCount != 0) { + println("** Deployable dataset members detected in $buildReportFile") + executesRecords.each { it.getOutputs().each { println(" ${it.dataset}, ${it.deployType}")}} + } + if (zFSFilesCount != 0) { + println("** Deployable zFS files detected in $buildReportFile") + ussRecords.each { ussRecord -> + ArrayList outputs = [] + ussRecord.getAttribute("outputs").split(';').collectEntries { entry -> + outputs += entry.replaceAll('\\[|\\]', '').split(',') + } + outputs.each { output -> + rootDir = output[0].trim() + file = output[1].trim() + deployType = output[2].trim() + println(" $rootDir/$file, $deployType") + } + } + } + + // Log deleted files + if (deletionCount != 0) { + println("** Deleted files detected in $buildReportFile") + deletions.each { it.getAttributeAsList("deletedBuildOutputs").each { println(" ${it}")}} + } + } + buildReportRank++ } @@ -176,29 +221,29 @@ properties.buildReportOrder.each{ buildReportFile -> Map buildOutputsMap = tempBuildOutputsMap.clone() -tempBuildOutputsMap.each{ deployableArtifact, info -> - container = info[0] - buildReport = info[1] - record = info[2] - artifactRank 
= info[3] - - if (record.getType() == DefaultRecordFactory.TYPE_EXECUTE || record.getType() == DefaultRecordFactory.TYPE_COPY_TO_PDS) { - DeployableArtifact deleteArtifact = new DeployableArtifact(container + "(" + deployableArtifact.file + ")", "DELETE") - if (tempBuildOutputsMap.containsKey(deleteArtifact)) { - deleteArtifactInfo = tempBuildOutputsMap.get(deleteArtifact) - deleteArtifactRank = deleteArtifactInfo[3] - if (artifactRank > deleteArtifactRank) { - buildOutputsMap.remove(deleteArtifact) - } else { - buildOutputsMap.remove(deployableArtifact) - } - } - } +tempBuildOutputsMap.each { deployableArtifact, info -> + container = info[0] + buildReport = info[1] + record = info[2] + artifactRank = info[3] + + if (record.getType() == DefaultRecordFactory.TYPE_EXECUTE || record.getType() == DefaultRecordFactory.TYPE_COPY_TO_PDS || record.getType() == "USS_RECORD") { + DeployableArtifact deleteArtifact = new DeployableArtifact(container + "(" + deployableArtifact.file + ")", "DELETE") + if (tempBuildOutputsMap.containsKey(deleteArtifact)) { + deleteArtifactInfo = tempBuildOutputsMap.get(deleteArtifact) + deleteArtifactRank = deleteArtifactInfo[3] + if (artifactRank > deleteArtifactRank) { + buildOutputsMap.remove(deleteArtifact) + } else { + buildOutputsMap.remove(deployableArtifact) + } + } + } } -if (buildOutputsMap.size() == 0 ) { - println("** No items to package in the provided build reports. Exiting.") - System.exit(0) +if (buildOutputsMap.size() == 0) { + println("** No items to package in the provided build reports. Exiting.") + System.exit(0) } // generate ship list file. specification of UCD ship list can be found at @@ -209,214 +254,283 @@ writer.write("\n"); def xml = new MarkupBuilder(writer) xml.manifest(type:"MANIFEST_SHIPLIST"){ - println " Creating general UCD component version properties." 
- - // Url to CI pipeline - if (properties.pipelineURL) property(name : "ci-pipelineUrl", value : properties.pipelineURL ) - // Git branch - if (properties.pullRequestURL) property(name : "ci-pullRequestURL", value : properties.pullRequestURL ) - // Git branch - if (properties.gitBranch) property(name : "ci-gitBranch", value : properties.gitBranch ) - - // flag for single build report reporting - boolean singleBuildReportReporting = true - - // iterate over Hashmap to generate container entries for UCD shiplist. - buildOutputsMap.each{ deployableArtifact, info -> - container = info[0] - buildReport = info[1] - record = info[2] - - - // obtain build info from the build result record - def buildResult = buildReport.getRecords().findAll{it.getType()==DefaultRecordFactory.TYPE_BUILD_RESULT}[0] - def buildResultRecord = buildReport.getRecords().find{ - try { - it.getType()==DefaultRecordFactory.TYPE_PROPERTIES && it.getId()=="DBB.BuildResultProperties" - } catch (Exception e){} - } - def buildResultProperties = null - if(buildResultRecord!=null){ - buildResultProperties = buildResultRecord.getProperties() - } - - // document DBB build properties as component version properties in case of a single build report (keep backward compatibility) - if (properties.buildReportOrder.size() == 1 && singleBuildReportReporting) { - println " Storing DBB Build result properties as general component version properties due to single build report." 
- - // Url to DBB Build result - if (buildResult != null) { - property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) - } - // Populate build result properties - if (buildResultProperties != null) { - buildResultProperties.each{ - //not all properties need to be included in the shiplist - //can ignore files processed - //can ignore full build / impact build - property(name:it.key, value:it.value) - } - } - - // Shiplist entry created, no need to document it a second time - singleBuildReportReporting = false - } - - // process TYPE_EXECUTE and TYPE_COPY_TO_PDS - if (record.getType()==DefaultRecordFactory.TYPE_EXECUTE || record.getType()==DefaultRecordFactory.TYPE_COPY_TO_PDS) { - - record.getOutputs().each{ output -> - // process only outputs of the key of the map - def fullDatasetName = container + "(" + deployableArtifact.file + ")" - if (fullDatasetName == output.dataset) { - if (ZFile.exists("//'$container(${deployableArtifact.file})'")) { - - println " Creating shiplist record for build output $container(${deployableArtifact.file}) with recordType $record.type." 
- def containerAttributes = getContainerAttributes(container, properties) - container(containerAttributes){ - resource(name:deployableArtifact.file, type:"PDSMember", deployType:output.deployType){ - - // document dbb build result url and build properties on the element level when there are more than one buildReport processed - if (properties.buildReportOrder.size() != 1) { - - // Url to DBB Build result - if (buildResult != null) { - property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) - } - // Populate build result properties - if (buildResultProperties != null) { - buildResultProperties.each{ - //not all properties need to be included in the shiplist - //can ignore files processed - //can ignore full build / impact build - property(name:it.key, value:it.value) - } - } - } - - property(name:"buildcommand", value:record.getCommand()) - - // Only TYPE_EXECUTE Records carry options - if (record.getType()==DefaultRecordFactory.TYPE_EXECUTE) property(name:"buildoptions", value:record.getOptions()) - - // Sample to add additional artifact properties. Here: adding db2 properties for a DBRM - // which where added to the build report through a basic PropertiesRecord. 
- // see https://github.com/IBM/dbb-zappbuild/blob/06ff114ee22b4e41a09aa0640ac75b7e56c70521/build-conf/build.properties#L79-L89 - - if (output.deployType.equals("DBRM")){ - propertyRecord = buildReport.getRecords().findAll{ - it.getType()==DefaultRecordFactory.TYPE_PROPERTIES && it.getProperty("file")==record.getFile() - } - propertyRecord.each { propertyRec -> - // Iterate Properties - (propertyRec.getProperties()).each { - property(name:"$it.key", value:it.value) - } - } - } - - // add githash to container - def githash = "" // set empty - if (buildResultProperties != null){ - // get git references from build properties - def gitproperty = buildResultProperties.find{ - it.key.contains(":githash:") && record.getFile().contains(it.key.substring(9)) - } - if (gitproperty != null ) { - githash = gitproperty.getValue() - // set properties in shiplist - property(name:"githash", value:githash) - if(properties.git_commitURL_prefix) property(name:"git-link-to-commit", value:"${properties.git_commitURL_prefix}/${githash}") - } - } - - // add source information in the input column of UCD - inputUrl = (buildResultProperties != null && properties.git_treeURL_prefix && githash!="") ? "${properties.git_treeURL_prefix}/${githash}/"+ record.getFile() : "" - inputs(url : "${inputUrl}"){ - input(name : record.getFile(), compileType : "Main", url : inputUrl) - - // adding dependencies - def dependencySets = buildReport.getRecords().findAll{ - it.getType()==DefaultRecordFactory.TYPE_DEPENDENCY_SET && it.getFile()==record.getFile() - }; - Set dependencyCache = new HashSet() - dependencySets.unique().each{ - it.getAllDependencies().each{ - if (it.isResolved() && !dependencyCache.contains(it.getLname()) && it.getFile()!=record.getFile()){ - def displayName = it.getFile() ? 
it.getFile() : it.getLname() - def dependencyUrl ="" - if (it.getFile() && (it.getCategory()=="COPY"||it.getCategory()=="SQL INCLUDE")) dependencyUrl = (buildResultProperties != null && properties.git_treeURL_prefix && githash!="") ? "${properties.git_treeURL_prefix}/${githash}/"+ it.getFile() : "" - input(name : displayName , compileType : it.getCategory(), url : dependencyUrl) - dependencyCache.add(it.getLname()) - } - } - } - } - } - } - } else { - println "*! The file '$container(${deployableArtifact.file})' doesn't exist. Copy is skipped." - } - } - } - } - else if (record.getType()=="DELETE_RECORD") { - // document delete container - - deletedFiles = record.getAttributeAsList("deletedBuildOutputs") - deletedFiles.each { deletedOutput -> - - // remove any quotes - deletedOutput = ((String) deletedOutput).replace('"', ''); - - // process only outputs of the key of the map - if (deployableArtifact.file == deletedOutput) { - - println " Defining shiplist delete container for $deletedOutput." 
- - deleted{ - // create container - def (ds, member) = getDatasetName(deployableArtifact.file) - def containerAttributes = getContainerAttributes(ds, properties) - container(containerAttributes){ - resource(name:member, type:"PDSMember") - - // document dbb build result url and build properties on the element level when there are more than one buildReport processed - if (properties.buildReportOrder.size() != 1) { - - // Url to DBB Build result - property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) - // Populate build result properties - if (buildResultProperties != null) { - buildResultProperties.each{ - //not all properties need to be included in the shiplist - //can ignore files processed - //can ignore full build / impact build - property(name:it.key, value:it.value) - } - } - } - - // add githash to container - def githash = "" // set empty - if (buildResultProperties != null){ - // get git references from build properties - def gitproperty = buildResultProperties.find{ - it.key.contains(":githash:") && record.getAttribute("file").contains(it.key.substring(9)) - } - if (gitproperty != null ) { - githash = gitproperty.getValue() - // set properties in shiplist - property(name:"githash", value:githash) - if(properties.git_commitURL_prefix) property(name:"git-link-to-commit", value:"${properties.git_commitURL_prefix}/${githash}") - } - } - } - } - } - } - } - } + println " Creating general UCD component version properties." 
+ + // Url to CI pipeline + if (properties.pipelineURL) property(name : "ci-pipelineUrl", value : properties.pipelineURL ) + // Git branch + if (properties.pullRequestURL) property(name : "ci-pullRequestURL", value : properties.pullRequestURL ) + // Git branch + if (properties.gitBranch) property(name : "ci-gitBranch", value : properties.gitBranch ) + + // flag for single build report reporting + boolean singleBuildReportReporting = true + + // iterate over Hashmap to generate container entries for UCD shiplist. + buildOutputsMap.each { deployableArtifact, info -> + container = info[0] + buildReport = info[1] + record = info[2] + + + // obtain build info from the build result record + def buildResult = buildReport.getRecords().findAll { it.getType()==DefaultRecordFactory.TYPE_BUILD_RESULT }[0] + def buildResultRecord = buildReport.getRecords().find { + try { + it.getType()==DefaultRecordFactory.TYPE_PROPERTIES && it.getId()=="DBB.BuildResultProperties" + } catch (Exception e){} + } + def buildResultProperties = null + if (buildResultRecord!=null) { + buildResultProperties = buildResultRecord.getProperties() + } + + // document DBB build properties as component version properties in case of a single build report (keep backward compatibility) + if (properties.buildReportOrder.size() == 1 && singleBuildReportReporting) { + println " Storing DBB Build result properties as general component version properties due to single build report." 
+ + // Url to DBB Build result + if (buildResult != null) { + property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) + } + // Populate build result properties + if (buildResultProperties != null) { + buildResultProperties.each{ + //not all properties need to be included in the shiplist + //can ignore files processed + //can ignore full build / impact build + property(name:it.key, value:it.value) + } + } + + // Shiplist entry created, no need to document it a second time + singleBuildReportReporting = false + } + + // process TYPE_EXECUTE and TYPE_COPY_TO_PDS + if (record.getType()==DefaultRecordFactory.TYPE_EXECUTE || record.getType()==DefaultRecordFactory.TYPE_COPY_TO_PDS) { + + record.getOutputs().each { output -> + // process only outputs of the key of the map + def fullDatasetName = container + "(" + deployableArtifact.file + ")" + if (fullDatasetName == output.dataset) { + if (ZFile.exists("//'$container(${deployableArtifact.file})'")) { + + println " Creating shiplist record for build output $container(${deployableArtifact.file}) with recordType $record.type." 
+ def containerAttributes = getContainerAttributes(container, properties) + container(containerAttributes) { + resource(name:deployableArtifact.file, type:"PDSMember", deployType:output.deployType){ + + // document dbb build result url and build properties on the element level when there are more than one buildReport processed + if (properties.buildReportOrder.size() != 1) { + + // Url to DBB Build result + if (buildResult != null) { + property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) + } + // Populate build result properties + if (buildResultProperties != null) { + buildResultProperties.each { + //not all properties need to be included in the shiplist + //can ignore files processed + //can ignore full build / impact build + property(name:it.key, value:it.value) + } + } + } + + property(name:"buildcommand", value:record.getCommand()) + + // Only TYPE_EXECUTE Records carry options + if (record.getType()==DefaultRecordFactory.TYPE_EXECUTE) property(name:"buildoptions", value:record.getOptions()) + + // Sample to add additional artifact properties. Here: adding db2 properties for a DBRM + // which where added to the build report through a basic PropertiesRecord. 
+ // see https://github.com/IBM/dbb-zappbuild/blob/06ff114ee22b4e41a09aa0640ac75b7e56c70521/build-conf/build.properties#L79-L89 + + if (output.deployType.equals("DBRM")) { + propertyRecord = buildReport.getRecords().findAll { + it.getType()==DefaultRecordFactory.TYPE_PROPERTIES && it.getProperty("file")==record.getFile() + } + propertyRecord.each { propertyRec -> + // Iterate Properties + (propertyRec.getProperties()).each { + property(name:"$it.key", value:it.value) + } + } + } + + // add githash to container + def githash = "" // set empty + if (buildResultProperties != null) { + // get git references from build properties + def gitproperty = buildResultProperties.find { + it.key.contains(":githash:") && record.getFile().contains(it.key.substring(9)) + } + if (gitproperty != null) { + githash = gitproperty.getValue() + // set properties in shiplist + property(name:"githash", value:githash) + if (properties.git_commitURL_prefix) property(name:"git-link-to-commit", value:"${properties.git_commitURL_prefix}/${githash}") + } + } + + // add source information in the input column of UCD + inputUrl = (buildResultProperties != null && properties.git_treeURL_prefix && githash!="") ? "${properties.git_treeURL_prefix}/${githash}/"+ record.getFile() : "" + inputs(url : "${inputUrl}") { + input(name : record.getFile(), compileType : "Main", url : inputUrl) + + // adding dependencies + def dependencySets = buildReport.getRecords().findAll { + it.getType()==DefaultRecordFactory.TYPE_DEPENDENCY_SET && it.getFile()==record.getFile() + }; + Set dependencyCache = new HashSet() + dependencySets.unique().each { + it.getAllDependencies().each { + if (it.isResolved() && !dependencyCache.contains(it.getLname()) && it.getFile()!=record.getFile()){ + def displayName = it.getFile() ? 
it.getFile() : it.getLname() + def dependencyUrl ="" + if (it.getFile() && (it.getCategory()=="COPY"||it.getCategory()=="SQL INCLUDE")) dependencyUrl = (buildResultProperties != null && properties.git_treeURL_prefix && githash!="") ? "${properties.git_treeURL_prefix}/${githash}/"+ it.getFile() : "" + input(name : displayName , compileType : it.getCategory(), url : dependencyUrl) + dependencyCache.add(it.getLname()) + } + } + } + } + } + } + } else { + println "*! The file '$container(${deployableArtifact.file})' doesn't exist. Copy is skipped." + } + } + } + } + else if (record.getType()=="USS_RECORD") { + ArrayList outputs = [] + record.getAttribute("outputs").split(';').collectEntries { entry -> + outputs += entry.replaceAll('\\[|\\]', '').split(',') + } + + outputs.each { output -> + rootDir = output[0].trim() + file = output[1].trim() + deployType = output[2].trim() + // process only outputs of the key of the map + if (deployableArtifact.file.equals(file)) { + if (isZFSFile("$rootDir/${deployableArtifact.file}")) { + println " Creating shiplist record for build output $rootDir/${deployableArtifact.file} with recordType $record.type." 
+ def (directory, relativeFileName) = getDirectoryAndFile(deployableArtifact.file) + println "**** $directory - $relativeFileName" + def containerAttributes = [name:directory, rootDir:rootDir, type:"directory"] + container(containerAttributes) { + resource(name:relativeFileName, type:"file", deployType:deployType) { + + // document dbb build result url and build properties on the element level when there are more than one buildReport processed + if (properties.buildReportOrder.size() != 1) { + + // Url to DBB Build result + if (buildResult != null) { + property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) + } + // Populate build result properties + if (buildResultProperties != null) { + buildResultProperties.each{ + //not all properties need to be included in the shiplist + //can ignore files processed + //can ignore full build / impact build + property(name:it.key, value:it.value) + } + } + } + + property(name:"buildcommand", value:record.getAttribute("command")) + property(name:"label", value:record.getAttribute("label")) + + // add githash to container + def githash = "" // set empty + if (buildResultProperties != null) { + // get git references from build properties + def gitproperty = buildResultProperties.find{ + it.key.contains(":githash:") && record.getAttribute("file").contains(it.key.substring(9)) + } + if (gitproperty != null) { + githash = gitproperty.getValue() + // set properties in shiplist + property(name:"githash", value:githash) + if(properties.git_commitURL_prefix) property(name:"git-link-to-commit", value:"${properties.git_commitURL_prefix}/${githash}") + } + } + + // add source information in the input column of UCD + inputUrl = (buildResultProperties != null && properties.git_treeURL_prefix && githash!="") ? 
"${properties.git_treeURL_prefix}/${githash}/"+ record.getAttribute("file") : "" + inputs(url : "${inputUrl}") { + input(name : record.getAttribute("file"), compileType : "Main", url : inputUrl) + } + } + } + } else { + println "*! The file '$rootDir/${deployableArtifact.file}' doesn't exist. Copy is skipped." + } + } + } + } + else if (record.getType()=="DELETE_RECORD") { + // document delete container + + deletedFiles = record.getAttributeAsList("deletedBuildOutputs") + deletedFiles.each { deletedOutput -> + + // remove any quotes + deletedOutput = ((String) deletedOutput).replace('"', ''); + + // process only outputs of the key of the map + if (deployableArtifact.file == deletedOutput) { + + println " Defining shiplist delete container for $deletedOutput." + + deleted { + // create container + def (ds, member) = getDatasetAndMember(deployableArtifact.file) + def containerAttributes = getContainerAttributes(ds, properties) + container(containerAttributes) { + resource(name:member, type:"PDSMember") + + // document dbb build result url and build properties on the element level when there are more than one buildReport processed + if (properties.buildReportOrder.size() != 1) { + + // Url to DBB Build result + property(name : "dbb-buildResultUrl", label: buildResult.getLabel(), value : buildResult.getUrl()) + // Populate build result properties + if (buildResultProperties != null) { + buildResultProperties.each{ + //not all properties need to be included in the shiplist + //can ignore files processed + //can ignore full build / impact build + property(name:it.key, value:it.value) + } + } + } + + // add githash to container + def githash = "" // set empty + if (buildResultProperties != null) { + // get git references from build properties + def gitproperty = buildResultProperties.find{ + it.key.contains(":githash:") && record.getAttribute("file").contains(it.key.substring(9)) + } + if (gitproperty != null) { + githash = gitproperty.getValue() + // set properties in 
shiplist + property(name:"githash", value:githash) + if(properties.git_commitURL_prefix) property(name:"git-link-to-commit", value:"${properties.git_commitURL_prefix}/${githash}") + } + } + } + } + } + } + } + } } println("** Write ship list file to $properties.workDir/shiplist.xml") @@ -432,217 +546,235 @@ shiplistFile.text = writer // https://www.ibm.com/docs/en/urbancode-deploy/7.2.1?topic=czcv-creating-zos-component-version-using-v2-package-format def buztoolOption = "createzosversion" if (properties.ucdV2PackageFormat.toBoolean()) { - buztoolOption = "createzosversion2" + buztoolOption = "createzosversion2" } def cmd = [ - properties.buztoolPath, - buztoolOption, - "-c", - properties.component, - "-s", - "$properties.workDir/shiplist.xml", - //requires UCD v6.2.6 and above - "-o", - "${properties.workDir}/buztool.output" + properties.buztoolPath, + buztoolOption, + "-c", + properties.component, + "-s", + "$properties.workDir/shiplist.xml", + //requires UCD v6.2.6 and above + "-o", + "${properties.workDir}/buztool.output" ] // set artifactRepository option if specified if (properties.artifactRepositorySettings) { - cmd << "-ar" - cmd << properties.artifactRepositorySettings + cmd << "-ar" + cmd << properties.artifactRepositorySettings } // set buztoolPropertyFile option if specified if (properties.buztoolPropertyFile) { - cmd << "-prop" - cmd << properties.buztoolPropertyFile + cmd << "-prop" + cmd << properties.buztoolPropertyFile } //set component version name if specified -if(properties.versionName){ - cmd << "-v" - cmd << "\"${properties.versionName}\"" +if(properties.versionName) { + cmd << "-v" + cmd << "\"${properties.versionName}\"" } def cmdStr = ""; -cmd.each{ cmdStr = cmdStr + it + " "} +cmd.each { cmdStr = cmdStr + it + " " } println("** Following UCD buztool cmd will be invoked") println cmdStr // execute command, if no preview is set -if (!properties.preview.toBoolean()){ - println("** Create version by running UCD buztool") - - StringBuffer 
response = new StringBuffer() - StringBuffer error = new StringBuffer() - - def p = cmd.execute() - p.waitForProcessOutput(response, error) - println(response.toString()) - - def rc = p.exitValue(); - if(rc==0){ - println("** buztool output properties") - def outputProp = new Properties() - new File("${properties.workDir}/buztool.output").withInputStream { outputProp.load(it) } - outputProp.each{k,v-> - println " $k -> $v" - } - }else{ - println("*! Error executing buztool\n" +error.toString()) - System.exit(rc) - } +if (!properties.preview.toBoolean()) { + println("** Create version by running UCD buztool") + + StringBuffer response = new StringBuffer() + StringBuffer error = new StringBuffer() + + def p = cmd.execute() + p.waitForProcessOutput(response, error) + println(response.toString()) + + def rc = p.exitValue(); + if (rc==0) { + println("** buztool output properties") + def outputProp = new Properties() + new File("${properties.workDir}/buztool.output").withInputStream { outputProp.load(it) } + outputProp.each { k,v-> + println " $k -> $v" + } + } else { + println("*! Error executing buztool\n" +error.toString()) + System.exit(rc) + } } /** - * parse data set name and member name - * @param fullname e.g. BLD.LOAD(PGM1) - * @return e.g. (BLD.LOAD, PGM1) + * return true if the file is a file stored on zFS, false otherwise */ -def getDatasetName(String fullname){ - def ds,member; - def elements = fullname.split("[\\(\\)]"); - ds = elements[0]; - member = elements.size()>1? 
elements[1] : ""; - return [ds, member]; +def isZFSFile(String name) { + Path path = Paths.get(name); + return Files.exists(path); } /** + * Extract the directory and the file + * from the fullname of a zFS file + * For instance: /var/test/file.txt --> [/var/test, file.txt] */ +def getDirectoryAndFile(String fullname) { + Path filePath = Paths.get(fullname); + String file = filePath.getFileName().toString(); + String directory = filePath.getParent(); + return [directory, file]; +} + +/** + * Parse the fullname of a qualified dataset and member name + * Returns its dataset name and member name) + * For instance: BLD.LOAD(PGM1) --> [BLD.LOAD, PGM1] + */ +def getDatasetAndMember(String fullname) { + def ds,member; + def elements = fullname.split("[\\(\\)]"); + ds = elements[0]; + member = elements.size()>1? elements[1] : ""; + return [ds, member]; +} + /** * calculate the container attributes for default package or package v2 */ // define container attributes def getContainerAttributes(String ds, Properties properties) { - def containerAttMap = [:] - if (properties.ucdV2PackageFormat.toBoolean()) { - // ucd package format v2 requres to set a deployType at container level - def containerDeployType - def lastLevelQual = ds.tokenize('.').last() - if (properties.containerMapping) { - // obtain the deployType setting from the property - def cMapping = parseJSONStringToMap(properties.containerMapping) - containerDeployType = cMapping[lastLevelQual] - if (containerDeployType == null) { - println "*! UCD v2 Package format requires a mapping for the copymode for $lastLevelQual through the containerMapping property - Current mapping is '$properties.containerMapping'." 
- } - } else { - // set the last level qualifier as deployType - containerDeployType = lastLevelQual - } - // create container element with deployType - containerAttMap = [name:ds, type:"PDS", deployType:containerDeployType] - }else { - // create container without deployType attribute - containerAttMap = [name:ds, type:"PDS"] - } - return containerAttMap + def containerAttMap = [:] + if (properties.ucdV2PackageFormat.toBoolean()) { + // ucd package format v2 requres to set a deployType at container level + def containerDeployType + def lastLevelQual = ds.tokenize('.').last() + if (properties.containerMapping) { + // obtain the deployType setting from the property + def cMapping = parseJSONStringToMap(properties.containerMapping) + containerDeployType = cMapping[lastLevelQual] + if (containerDeployType == null) { + println "*! UCD v2 Package format requires a mapping for the copymode for $lastLevelQual through the containerMapping property - Current mapping is '$properties.containerMapping'." 
+ } + } else { + // set the last level qualifier as deployType + containerDeployType = lastLevelQual + } + // create container element with deployType + containerAttMap = [name:ds, type:"PDS", deployType:containerDeployType] + } else { + // create container without deployType attribute + containerAttMap = [name:ds, type:"PDS"] + } + return containerAttMap } -def parseInput(String[] cliArgs){ - def cli = new CliBuilder(usage: "deploy.groovy [options]") - cli.b(longOpt:'buztool', args:1, argName:'file', 'Absolute path to UrbanCode Deploy buztool.sh script') - cli.w(longOpt:'workDir', args:1, argName:'dir', 'Absolute path to the DBB build output directory') - cli.c(longOpt:'component', args:1, argName:'name', 'Name of the UCD component to create version in') - cli.ar(longOpt:'artifactRepository', args:1, argName:'artifactRepositorySettings', 'Absolute path to Artifactory Server connection file (** Deprecated, please use --propertyFile instead **)') - cli.prop(longOpt:'propertyFile', args:1, argName:'buztoolPropertyFile', 'Absolute path to UCD buztool property file (Optional). 
From UCD v7.1.x and greater it replaces the -ar option') - cli.v(longOpt:'versionName', args:1, argName:'versionName', 'Name of the UCD component version') - cli.zpv2(longOpt:'ucdV2PackageFormat', 'Invoke buztool with the buztool package version v2.') - cli.p(longOpt:'preview', 'Preview mode - generate shiplist, but do not run buztool.sh') - - cli.bO(longOpt:'buildReportOrder', args:1, argName:'buildReportOrder', 'Build a cumulative package based on a comma separated list of one or multiple DBB build reports processed in the provided order (Optional).') - cli.boFile(longOpt:'buildReportOrderFile', args:1, argName:'buildReportOrderFile', 'Build a cumulative package based on an input file that lists one or multiple build reports defining the order of processing (Optional).') - - cli.ppf(longOpt:'packagingPropFiles', args:1,'Comma separated list of property files to configure the dbb-ucd-packaging script (Optional)') - cli.rpFile(longOpt:'repositoryInfoPropertiesFile', args:1,'Absolute path to property file containing URL prefixes to git provider (Optional) (** Deprecated, please use --packagingPropFiles instead **)') - - // additional references to build and workflow - cli.pURL(longOpt:'pipelineURL', args:1,'URL to the pipeline build result (Optional)') - cli.g(longOpt:'gitBranch', args:1,'Name of the git branch (Optional)') - cli.prURL(longOpt:'pullRequestURL', args:1,'URL to the Pull/Merge request (Optional)') - - cli.h(longOpt:'help', 'Prints this message') - def opts = cli.parse(cliArgs) - if (opts == null || opts.h) { // if help option used, print usage and exit - cli.usage() - System.exit(0) - } - - def properties = new Properties() - - // load workDir from ./build.properties if it exists - def buildProperties = new Properties() - def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent - def buildPropFile = new File("$scriptDir/build.properties") - if (buildPropFile.exists()){ - buildPropFile.withInputStream { 
buildProperties.load(it) } - if (buildProperties.workDir != null) - properties.workDir = buildProperties.workDir - } - - // load properties from repositoryInfoPropertiesFile - if (opts.rpFile){ - def repositoryPropFile = new File("$opts.rpFile") - if (repositoryPropFile.exists()){ - repositoryPropFile.withInputStream { properties.load(it) } - } - } - - // load configuration files - if (opts.ppf){ - opts.ppf.split(",").each { propertyFile -> - def repositoryPropFile = new File(propertyFile) - if (repositoryPropFile.exists()){ - repositoryPropFile.withInputStream { properties.load(it) } - } - } - } - - // set command line arguments - if (opts.w) properties.workDir = opts.w - if (opts.b) properties.buztoolPath = opts.b - if (opts.c) properties.component = opts.c - if (opts.ar) properties.artifactRepositorySettings = opts.ar - if (opts.prop) properties.buztoolPropertyFile = opts.prop - if (opts.v) properties.versionName = opts.v - if (opts.pURL) properties.pipelineURL = opts.pURL - if (opts.g) properties.gitBranch = opts.g - if (opts.prURL) properties.pullRequestURL = opts.prURL - properties.preview = (opts.p) ? 'true' : 'false' - properties.ucdV2PackageFormat = (opts.zpv2) ? 
'true' : 'false' - - // setup single or multiple build reports - def buildReports = [] - if (opts.boFile) { - new File (opts.boFile).eachLine { line -> - buildReports.add(line) - } - } - - if (opts.bO) { - opts.bO.split(',').each{ - buildReports.add(it) - } - } - - if (!opts.boFile && !opts.bO){ // default lookup in Workdir - buildReports.add("" + opts.w + "/BuildReport.json") - } - - properties.buildReportOrder = buildReports - - - // validate required properties - try { - assert properties.buztoolPath : "Missing property buztool script path" - assert properties.workDir: "Missing property build work directory" - assert properties.component: "Missing property UCD component" - } catch (AssertionError e) { - cli.usage() - throw e - } - return properties +def parseInput(String[] cliArgs) { + def cli = new CliBuilder(usage: "deploy.groovy [options]") + cli.b(longOpt:'buztool', args:1, argName:'file', 'Absolute path to UrbanCode Deploy buztool.sh script') + cli.w(longOpt:'workDir', args:1, argName:'dir', 'Absolute path to the DBB build output directory') + cli.c(longOpt:'component', args:1, argName:'name', 'Name of the UCD component to create version in') + cli.ar(longOpt:'artifactRepository', args:1, argName:'artifactRepositorySettings', 'Absolute path to Artifactory Server connection file (** Deprecated, please use --propertyFile instead **)') + cli.prop(longOpt:'propertyFile', args:1, argName:'buztoolPropertyFile', 'Absolute path to UCD buztool property file (Optional). 
From UCD v7.1.x and greater it replaces the -ar option') + cli.v(longOpt:'versionName', args:1, argName:'versionName', 'Name of the UCD component version') + cli.zpv2(longOpt:'ucdV2PackageFormat', 'Invoke buztool with the buztool package version v2.') + cli.p(longOpt:'preview', 'Preview mode - generate shiplist, but do not run buztool.sh') + + cli.bO(longOpt:'buildReportOrder', args:1, argName:'buildReportOrder', 'Build a cumulative package based on a comma separated list of one or multiple DBB build reports processed in the provided order (Optional).') + cli.boFile(longOpt:'buildReportOrderFile', args:1, argName:'buildReportOrderFile', 'Build a cumulative package based on an input file that lists one or multiple build reports defining the order of processing (Optional).') + + cli.ppf(longOpt:'packagingPropFiles', args:1,'Comma separated list of property files to configure the dbb-ucd-packaging script (Optional)') + cli.rpFile(longOpt:'repositoryInfoPropertiesFile', args:1,'Absolute path to property file containing URL prefixes to git provider (Optional) (** Deprecated, please use --packagingPropFiles instead **)') + + // additional references to build and workflow + cli.pURL(longOpt:'pipelineURL', args:1,'URL to the pipeline build result (Optional)') + cli.g(longOpt:'gitBranch', args:1,'Name of the git branch (Optional)') + cli.prURL(longOpt:'pullRequestURL', args:1,'URL to the Pull/Merge request (Optional)') + + cli.h(longOpt:'help', 'Prints this message') + def opts = cli.parse(cliArgs) + if (opts == null || opts.h) { // if help option used, print usage and exit + cli.usage() + System.exit(0) + } + + def properties = new Properties() + + // load workDir from ./build.properties if it exists + def buildProperties = new Properties() + def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent + def buildPropFile = new File("$scriptDir/build.properties") + if (buildPropFile.exists()) { + buildPropFile.withInputStream { 
buildProperties.load(it) } + if (buildProperties.workDir != null) + properties.workDir = buildProperties.workDir + } + + // load properties from repositoryInfoPropertiesFile + if (opts.rpFile) { + def repositoryPropFile = new File("$opts.rpFile") + if (repositoryPropFile.exists()){ + repositoryPropFile.withInputStream { properties.load(it) } + } + } + + // load configuration files + if (opts.ppf) { + opts.ppf.split(",").each { propertyFile -> + def repositoryPropFile = new File(propertyFile) + if (repositoryPropFile.exists()){ + repositoryPropFile.withInputStream { properties.load(it) } + } + } + } + + // set command line arguments + if (opts.w) properties.workDir = opts.w + if (opts.b) properties.buztoolPath = opts.b + if (opts.c) properties.component = opts.c + if (opts.ar) properties.artifactRepositorySettings = opts.ar + if (opts.prop) properties.buztoolPropertyFile = opts.prop + if (opts.v) properties.versionName = opts.v + if (opts.pURL) properties.pipelineURL = opts.pURL + if (opts.g) properties.gitBranch = opts.g + if (opts.prURL) properties.pullRequestURL = opts.prURL + properties.preview = (opts.p) ? 'true' : 'false' + properties.ucdV2PackageFormat = (opts.zpv2) ? 
'true' : 'false' + + // setup single or multiple build reports + def buildReports = [] + if (opts.boFile) { + new File (opts.boFile).eachLine { line -> + buildReports.add(line) + } + } + + if (opts.bO) { + opts.bO.split(',').each{ + buildReports.add(it) + } + } + + if (!opts.boFile && !opts.bO) { // default lookup in Workdir + buildReports.add("" + opts.w + "/BuildReport.json") + } + + properties.buildReportOrder = buildReports + + + // validate required properties + try { + assert properties.buztoolPath : "Missing property buztool script path" + assert properties.workDir: "Missing property build work directory" + assert properties.component: "Missing property UCD component" + } catch (AssertionError e) { + cli.usage() + throw e + } + return properties } @@ -652,49 +784,49 @@ def parseInput(String[] cliArgs){ */ def parseJSONStringToMap(String packageProperty) { - Map map = [:] - try { - JsonSlurper slurper = new groovy.json.JsonSlurper() - map = slurper.parseText(packageProperty) - } catch (Exception e) { - errorMsg = "*! dbb-ucd-packaging.parseStringToMap - Failed to parse setting $packageProperty from String into a Map object. Process exiting." - println errorMsg - println e.getMessage() - System.exit(3) - } - return map + Map map = [:] + try { + JsonSlurper slurper = new groovy.json.JsonSlurper() + map = slurper.parseText(packageProperty) + } catch (Exception e) { + errorMsg = "*! dbb-ucd-packaging.parseStringToMap - Failed to parse setting $packageProperty from String into a Map object. Process exiting." + println errorMsg + println e.getMessage() + System.exit(3) + } + return map } class DeployableArtifact { - private final String file; - private final String deployType; - - DeployableArtifact(String file, String deployType) { - this.file = file; - this.deployType = deployType; - } - - @Override - public int hashCode() { - String concatenation = file + "." 
+ deployType; - return concatenation.hashCode(); - } - - public boolean equals(DeployableArtifact other) { - return other.file.equals(file) & other.deployType.equals(deployType); - } - - @Override - public boolean equals(Object other) { - if (other instanceof DeployableArtifact) { - return equals((DeployableArtifact) other) - } else { - return false; - } - } - - @Override - public String toString() { - return file + "." + deployType; - } + private final String file; + private final String deployType; + + DeployableArtifact(String file, String deployType) { + this.file = file; + this.deployType = deployType; + } + + @Override + public int hashCode() { + String concatenation = file + "." + deployType; + return concatenation.hashCode(); + } + + public boolean equals(DeployableArtifact other) { + return other.file.equals(file) & other.deployType.equals(deployType); + } + + @Override + public boolean equals(Object other) { + if (other instanceof DeployableArtifact) { + return equals((DeployableArtifact) other) + } else { + return false; + } + } + + @Override + public String toString() { + return file + "." 
+ deployType; + } } \ No newline at end of file diff --git a/Pipeline/PackageBuildOutputs/PackageBuildOutputs.groovy b/Pipeline/PackageBuildOutputs/PackageBuildOutputs.groovy index c768b4b2..91745214 100644 --- a/Pipeline/PackageBuildOutputs/PackageBuildOutputs.groovy +++ b/Pipeline/PackageBuildOutputs/PackageBuildOutputs.groovy @@ -51,11 +51,11 @@ def startTime = new Date() props.startTime = startTime.format("yyyyMMdd.hhmmss.mmm") println("** PackageBuildOutputs start at $props.startTime") println("** Properties at startup:") -props.sort().each{k,v-> - if ( k == "artifactRepository.password" ) - println " $k -> xxxxxx " - else - println " $k -> $v" +props.sort().each { k,v-> + if ( k == "artifactRepository.password" ) + println " $k -> xxxxxx " + else + println " $k -> $v" } // Enable file tagging @@ -71,231 +71,308 @@ Map buildOutputsMap = new HashMap - println("** Read build report data from ${buildReportFile}.") - def jsonOutputFile = new File(buildReportFile) - - if(!jsonOutputFile.exists()){ - println("*! 
Error: Build report data at $buildReportFile not found.") - System.exit(1) - } - - def buildReport= BuildReport.parse(new FileInputStream(jsonOutputFile)) - - // Read buildInfo to obtain build information - def buildInfo = buildReport.getRecords().findAll{ - try { - it.getType()==DefaultRecordFactory.TYPE_BUILD_RESULT - } catch (Exception e){} - } - if (buildInfo.size() != 0) { - tarFileLabel = buildInfo[0].label - } - - // finds all the build outputs with a deployType - def buildRecords = buildReport.getRecords().findAll{ - try { - (it.getType()==DefaultRecordFactory.TYPE_EXECUTE || it.getType()==DefaultRecordFactory.TYPE_COPY_TO_PDS) && - !it.getOutputs().isEmpty() - } catch (Exception e){} - } - - if (props.deployTypeFilter){ - println("** Filtering Output Records on following deployTypes: ${props.deployTypeFilter}...") - buildRecords.each { - // filtered executes - def filteredOutputs = it.getOutputs().findAll{ o -> - o.deployType != null && (props.deployTypeFilter).split(',').contains(o.deployType) - } - // Manipulating the scope of build outputs - it.getOutputs().clear() - it.getOutputs().addAll(filteredOutputs) - } - } else { - // Remove outputs without deployType + ZUNIT-TESTCASEs - println("** Removing Output Records without deployType or with deployType=ZUNIT-TESTCASE...") - buildRecords.each { - def unwantedOutputs = it.getOutputs().findAll{ o -> - o.deployType == null || o.deployType == 'ZUNIT-TESTCASE' - } - it.getOutputs().removeAll(unwantedOutputs) - } - } - - def count = 0 - - // adding files and executes with outputs to Hashmap to remove redundant data - buildRecords.each{ buildRecord -> - if (buildRecord.getOutputs().size() != 0) { - buildRecord.getOutputs().each{ output -> - count++ - def (dataset, member) = getDatasetName(output.dataset) - buildOutputsMap.put(new DeployableArtifact(member, output.deployType), [dataset, buildRecord]) - } - } - } - - if ( count == 0 ) { - println("** No items to package in ${buildReportFile}.") - } else { - 
println("** Files detected in ${buildReportFile}.") - buildRecords.each { it.getOutputs().each { println(" ${it.dataset}, ${it.deployType}")}} - } +props.buildReportOrder.each { buildReportFile -> + println("** Read build report data from ${buildReportFile}.") + def jsonOutputFile = new File(buildReportFile) + + if(!jsonOutputFile.exists()){ + println("*! Error: Build report data at $buildReportFile not found.") + System.exit(1) + } + + def buildReport= BuildReport.parse(new FileInputStream(jsonOutputFile)) + + // Read buildInfo to obtain build information + def buildInfo = buildReport.getRecords().findAll{ + try { + it.getType()==DefaultRecordFactory.TYPE_BUILD_RESULT + } catch (Exception e){} + } + if (buildInfo.size() != 0) { + tarFileLabel = buildInfo[0].label + } + + // finds all the build outputs with a deployType + def buildRecords = buildReport.getRecords().findAll{ + try { + (it.getType()==DefaultRecordFactory.TYPE_EXECUTE || it.getType()==DefaultRecordFactory.TYPE_COPY_TO_PDS) && + !it.getOutputs().isEmpty() + } catch (Exception e){} + } + + // finds all the build outputs with a deployType + // Today the USS_RECORD type is built using an AnyTypeRecord record + // An Idea is currently opened to have an official USS_RECORD: https://ideas.ibm.com/ideas/DBB-I-43 + def ussBuildRecords = buildReport.getRecords().findAll{ + try { + it.getType()=="USS_RECORD" && !it.getAttribute("outputs").isEmpty() + } catch (Exception e){} + } + + if (props.deployTypeFilter){ + println("** Filtering Output Records on following deployTypes: ${props.deployTypeFilter}...") + buildRecords.each { + // filtered executes + def filteredOutputs = it.getOutputs().findAll{ o -> + o.deployType != null && (props.deployTypeFilter).split(',').contains(o.deployType) + } + // Manipulating the scope of build outputs + it.getOutputs().clear() + it.getOutputs().addAll(filteredOutputs) + } + ussBuildRecords.each { + ArrayList outputs = [] + it.getAttribute("outputs").split(';').collectEntries { 
entry -> + outputs += entry.replaceAll('\\[|\\]', '').split(',') + } + + ArrayList filteredOutputs = [] + outputs.each{ output -> + rootDir = output[0].trim() + file = output[1].trim() + deployType = output[2].trim() + if (!(props.deployTypeFilter).split(',').contains(deployType)) { + filteredOutputs += output.toString() + } + } + + def filteredOutputsStrings = String.join(";", filteredOutputs) + it.setAttribute("outputs", filteredOutputsStrings) + } + } else { + // Remove outputs without deployType + ZUNIT-TESTCASEs + println("** Removing Output Records without deployType or with deployType=ZUNIT-TESTCASE...") + buildRecords.each { + def unwantedOutputs = it.getOutputs().findAll{ o -> + o.deployType == null || o.deployType == 'ZUNIT-TESTCASE' + } + it.getOutputs().removeAll(unwantedOutputs) + } + } + + buildRecords += ussBuildRecords + + def datasetMembersCount = 0 + def zFSFilesCount = 0 + + // adding files and executes with outputs to Hashmap to remove redundant data + buildRecords.each{ buildRecord -> + if (buildRecord.getType()=="USS_RECORD") { + if (!buildRecord.getAttribute("outputs").isEmpty()) { + ArrayList outputs = [] + buildRecord.getAttribute("outputs").split(';').collectEntries { entry -> + outputs += entry.replaceAll('\\[|\\]', '').split(',') + } + zFSFilesCount += outputs.size() + outputs.each{ output -> + rootDir = output[0].trim() + file = output[1].trim() + deployType = output[2].trim() + buildOutputsMap.put(new DeployableArtifact(file, deployType), [rootDir, buildRecord]) + } + } + } else { + if (buildRecord.getOutputs().size() != 0) { + buildRecord.getOutputs().each{ output -> + datasetMembersCount++ + def (dataset, member) = getDatasetName(output.dataset) + buildOutputsMap.put(new DeployableArtifact(member, output.deployType), [dataset, buildRecord]) + } + } + } + } + + if ( datasetMembersCount + zFSFilesCount == 0 ) { + println("** No items to package in ${buildReportFile}.") + } else { + println("** Deployable files detected in 
$buildReportFile") + buildRecords.each { record -> + if (record.getType()=="USS_RECORD") { + if (!record.getAttribute("outputs").isEmpty()) { + ArrayList outputs = [] + record.getAttribute("outputs").split(';').collectEntries { entry -> + outputs += entry.replaceAll('\\[|\\]', '').split(',') + } + outputs.each{ output -> + rootDir = output[0].trim() + file = output[1].trim() + deployType = output[2].trim() + println(" $rootDir/$file, $deployType") + } + } + } else { + record.getOutputs().each {println(" ${it.dataset}, ${it.deployType}")} + } + } + } } if (buildOutputsMap.size() == 0) { - println("** There are no build outputs found in all provided build reports. Exiting.") - System.exit(0) + println("** There are no build outputs found in all provided build reports. Exiting.") + System.exit(0) } else { - def String tarFileName = (props.tarFileName) ? props.tarFileName : "${tarFileLabel}.tar" - - //Create a temporary directory on zFS to copy the load modules from data sets to - def tempLoadDir = new File("$props.workDir/tempPackageDir") - !tempLoadDir.exists() ?: tempLoadDir.deleteDir() - tempLoadDir.mkdirs() - - println( "*** Number of build outputs to package: ${buildOutputsMap.size()}") - - println("** Copying build outputs to temporary package directory....") - - buildOutputsMap.each { deployableArtifact, info -> - String dataset = info[0] - Record record = info[1] - - def filePath = "$tempLoadDir/$dataset" - new File(filePath).mkdirs() - - // define file name in USS - // default : member - def fileName = deployableArtifact.file - - // add deployType to file name - if (props.addExtension && props.addExtension.toBoolean()) { - fileName = fileName + '.' 
+ deployableArtifact.deployType - } - def file = new File(filePath, fileName) - - // set copyMode based on last level qualifier - currentCopyMode = copyModeMap[dataset.replaceAll(/.*\.([^.]*)/, "\$1")] - if (currentCopyMode != null) { - if (ZFile.exists("//'$dataset(${deployableArtifact.file})'")) { - // Copy outputs to HFS - CopyToHFS copy = new CopyToHFS() - copy.setCopyMode(DBBConstants.CopyMode.valueOf(currentCopyMode)) - copy.setDataset(dataset) - - println " Copying $dataset(${deployableArtifact.file}) to $filePath/$fileName with DBB Copymode $currentCopyMode..." - copy.dataset(dataset).member(deployableArtifact.file).file(file).execute() - - // Tagging binary files - if (currentCopyMode == CopyMode.BINARY || currentCopyMode == CopyMode.LOAD) { - StringBuffer stdout = new StringBuffer() - StringBuffer stderr = new StringBuffer() - Process process = "chtag -b $file".execute() - process.waitForProcessOutput(stdout, stderr) - if (stderr){ - println ("*! stderr : $stderr") - println ("*! stdout : $stdout") - } - } - } else { - println "*! The file '$dataset(${deployableArtifact.file})' doesn't exist. Copy is skipped." - } - } else { - println "*! Copying $dataset(${deployableArtifact.file}) could not be copied due to missing mapping." 
- } - } - - // log buildReportOrder file and add build reports to tar file - File buildReportOrder = new File("$tempLoadDir/buildReportOrder.txt") - buildReportOrder.write('') - String logEncoding = 'UTF-8' - String buildReportFileName - int counter = 0 - - buildReportOrder.withWriter(logEncoding) { writer -> - props.buildReportOrder.each{ buildReportFile -> - counter++ - - Path buildReportFilePath = Paths.get(buildReportFile) - Path copiedBuildReportFilePath = Paths.get(tempLoadDir.getPath() + "/" + buildReportFilePath.getFileName().toString()) - - // prefixing the buildreport with sequence number when having multiple - if (props.buildReportOrder.size() > 1) - copiedBuildReportFilePath = Paths.get(tempLoadDir.getPath() + "/" + "$counter".padLeft(3, "0") + "_" + buildReportFilePath.getFileName().toString()) - - Files.copy(buildReportFilePath, copiedBuildReportFilePath, COPY_ATTRIBUTES) - writer.write("${copiedBuildReportFilePath.toString()}\n") - } - } - - Path packagingPropertiesFilePath = Paths.get(props.packagingPropertiesFile) - Path copiedPackagingPropertiesFilePath = Paths.get(tempLoadDir.getPath() + "/" + packagingPropertiesFilePath.getFileName().toString()) - Files.copy(packagingPropertiesFilePath, copiedPackagingPropertiesFilePath, COPY_ATTRIBUTES) - - def tarFile = new File("$props.workDir/${tarFileName}") - - println("** Creating tar file at ${tarFile}...") - // Note: https://www.ibm.com/docs/en/zos/2.4.0?topic=scd-tar-manipulate-tar-archive-files-copy-back-up-file - // To save all attributes to be restored on z/OS and non-z/OS systems : tar -UX - def processCmd = [ - "sh", - "-c", - "tar cUXf $tarFile *" - ] - - def rc = runProcess(processCmd, tempLoadDir) - assert rc == 0 : "Failed to package" - - //Package additional outputs to tar file. 
- if (props.includeLogs) (props.includeLogs).split(",").each { logPattern -> - println("** Adding $logPattern to ${tarFile}...") - processCmd = [ - "sh", - "-c", - "tar rUXf $tarFile $logPattern" - ] - - rc = runProcess(processCmd, new File(props.workDir)) - assert rc == 0 : "Failed to append $logPattern." - } - - println ("** Package successfully created at ${tarFile}.") - - if(props.verbose && props.verbose.toBoolean()) { - println ("** List package contents.") - - processCmd = [ - "sh", - "-c", - "tar tvf $tarFile" - ] - - rc = runProcess(processCmd, new File(props.workDir)) - assert rc == 0 : "Failed to list contents of tarfile $tarFile." - - } - - //Set up the artifact repository information to publish the tar file - if (props.publish && props.publish.toBoolean()){ - // Configuring artifact repositoryHelper parms - def String remotePath = (props.versionName) ? (props.versionName + "/" + tarFileName) : (tarFileLabel + "/" + tarFileName) - def url = new URI(props.get('artifactRepository.url') + "/" + props.get('artifactRepository.repo') + "/" + props.'artifactRepository.directory' + "/" + remotePath ).normalize().toString() // Normalized URL - - def apiKey = props.'artifactRepository.user' - def user = props.'artifactRepository.user' - def password = props.'artifactRepository.password' - def repo = props.get('artifactRepository.repo') as String - - //Call the artifactRepositoryHelpers to publish the tar file - def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent - File artifactRepoHelpersFile = new File("$scriptDir/ArtifactRepositoryHelpers.groovy") - Class artifactRepositoryHelpersClass = new GroovyClassLoader(getClass().getClassLoader()).parseClass(artifactRepoHelpersFile) - GroovyObject artifactRepositoryHelpers = (GroovyObject) artifactRepositoryHelpersClass.newInstance() - - println ("** Uploading package to Artifact Repository $url...") - artifactRepositoryHelpers.upload(url, tarFile as String, user, password, 
props.verbose.toBoolean() ) - } + def String tarFileName = (props.tarFileName) ? props.tarFileName : "${tarFileLabel}.tar" + + //Create a temporary directory on zFS to copy the load modules from data sets to + def tempLoadDir = new File("$props.workDir/tempPackageDir") + !tempLoadDir.exists() ?: tempLoadDir.deleteDir() + tempLoadDir.mkdirs() + + println( "*** Number of build outputs to package: ${buildOutputsMap.size()}") + + println("** Copying build outputs to temporary package directory....") + + buildOutputsMap.each { deployableArtifact, info -> + String container = info[0] + Record record = info[1] + + def filePath = "" + if (record.getType()=="USS_RECORD") { + filePath = "$tempLoadDir" + } else { + filePath = "$tempLoadDir/$container" + } + + // define file name in USS + def fileName = deployableArtifact.file + + // add deployType to file name + if (props.addExtension && props.addExtension.toBoolean()) { + fileName = fileName + '.' + deployableArtifact.deployType + } + def file = new File(filePath, fileName) + + def (directory, relativeFileName) = extractDirectoryAndFile(file.toPath().toString()) + new File(directory).mkdirs() + + + if (record.getType()=="USS_RECORD") { + def originalFile = new File(container + "/" + deployableArtifact.file) + println " Copying ${originalFile.toPath()} to ${file.toPath()}..." + Files.copy(originalFile.toPath(), file.toPath(), StandardCopyOption.COPY_ATTRIBUTES); + } else { + // set copyMode based on last level qualifier + currentCopyMode = copyModeMap[container.replaceAll(/.*\.([^.]*)/, "\$1")] + if (currentCopyMode != null) { + if (ZFile.exists("//'$container(${deployableArtifact.file})'")) { + // Copy outputs to HFS + CopyToHFS copy = new CopyToHFS() + copy.setCopyMode(DBBConstants.CopyMode.valueOf(currentCopyMode)) + copy.setDataset(container) + + println " Copying $container(${deployableArtifact.file}) to $filePath/$fileName with DBB Copymode $currentCopyMode..." 
+ copy.dataset(container).member(deployableArtifact.file).file(file).execute() + + // Tagging binary files + if (currentCopyMode == CopyMode.BINARY || currentCopyMode == CopyMode.LOAD) { + StringBuffer stdout = new StringBuffer() + StringBuffer stderr = new StringBuffer() + Process process = "chtag -b $file".execute() + process.waitForProcessOutput(stdout, stderr) + if (stderr){ + println ("*! stderr : $stderr") + println ("*! stdout : $stdout") + } + } + } else { + println "*! The file '$container(${deployableArtifact.file})' doesn't exist. Copy is skipped." + } + } else { + println "*! Copying $container(${deployableArtifact.file}) could not be copied due to missing mapping." + } + } + } + + // log buildReportOrder file and add build reports to tar file + File buildReportOrder = new File("$tempLoadDir/buildReportOrder.txt") + buildReportOrder.write('') + String logEncoding = 'UTF-8' + String buildReportFileName + int counter = 0 + + buildReportOrder.withWriter(logEncoding) { writer -> + props.buildReportOrder.each{ buildReportFile -> + counter++ + + Path buildReportFilePath = Paths.get(buildReportFile) + Path copiedBuildReportFilePath = Paths.get(tempLoadDir.getPath() + "/" + buildReportFilePath.getFileName().toString()) + + // prefixing the buildreport with sequence number when having multiple + if (props.buildReportOrder.size() > 1) + copiedBuildReportFilePath = Paths.get(tempLoadDir.getPath() + "/" + "$counter".padLeft(3, "0") + "_" + buildReportFilePath.getFileName().toString()) + + Files.copy(buildReportFilePath, copiedBuildReportFilePath, COPY_ATTRIBUTES) + writer.write("${copiedBuildReportFilePath.toString()}\n") + } + } + + Path packagingPropertiesFilePath = Paths.get(props.packagingPropertiesFile) + Path copiedPackagingPropertiesFilePath = Paths.get(tempLoadDir.getPath() + "/" + packagingPropertiesFilePath.getFileName().toString()) + Files.copy(packagingPropertiesFilePath, copiedPackagingPropertiesFilePath, COPY_ATTRIBUTES) + + def tarFile = new 
File("$props.workDir/${tarFileName}") + + println("** Creating tar file at ${tarFile}...") + // Note: https://www.ibm.com/docs/en/zos/2.4.0?topic=scd-tar-manipulate-tar-archive-files-copy-back-up-file + // To save all attributes to be restored on z/OS and non-z/OS systems : tar -UX + def processCmd = [ + "sh", + "-c", + "tar cUXf $tarFile *" + ] + + def rc = runProcess(processCmd, tempLoadDir) + assert rc == 0 : "Failed to package" + + //Package additional outputs to tar file. + if (props.includeLogs) (props.includeLogs).split(",").each { logPattern -> + println("** Adding $logPattern to ${tarFile}...") + processCmd = [ + "sh", + "-c", + "tar rUXf $tarFile $logPattern" + ] + + rc = runProcess(processCmd, new File(props.workDir)) + assert rc == 0 : "Failed to append $logPattern." + } + + println ("** Package successfully created at ${tarFile}.") + + if(props.verbose && props.verbose.toBoolean()) { + println ("** List package contents.") + + processCmd = [ + "sh", + "-c", + "tar tvf $tarFile" + ] + + rc = runProcess(processCmd, new File(props.workDir)) + assert rc == 0 : "Failed to list contents of tarfile $tarFile." + + } + + //Set up the artifact repository information to publish the tar file + if (props.publish && props.publish.toBoolean()){ + // Configuring artifact repositoryHelper parms + def String remotePath = (props.versionName) ? 
(props.versionName + "/" + tarFileName) : (tarFileLabel + "/" + tarFileName) + def url = new URI(props.get('artifactRepository.url') + "/" + props.get('artifactRepository.repo') + "/" + props.'artifactRepository.directory' + "/" + remotePath ).normalize().toString() // Normalized URL + + def apiKey = props.'artifactRepository.user' + def user = props.'artifactRepository.user' + def password = props.'artifactRepository.password' + def repo = props.get('artifactRepository.repo') as String + + //Call the artifactRepositoryHelpers to publish the tar file + def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent + File artifactRepoHelpersFile = new File("$scriptDir/ArtifactRepositoryHelpers.groovy") + Class artifactRepositoryHelpersClass = new GroovyClassLoader(getClass().getClassLoader()).parseClass(artifactRepoHelpersFile) + GroovyObject artifactRepositoryHelpers = (GroovyObject) artifactRepositoryHelpersClass.newInstance() + + println ("** Uploading package to Artifact Repository $url...") + artifactRepositoryHelpers.upload(url, tarFile as String, user, password, props.verbose.toBoolean() ) + } } @@ -306,208 +383,221 @@ if (buildOutputsMap.size() == 0) { * @return e.g. (BLD.LOAD, PGM1) */ def getDatasetName(String fullname){ - def ds,member; - def elements = fullname.split("[\\(\\)]"); - ds = elements[0]; - member = elements.size()>1? elements[1] : ""; - return [ds, member]; + def ds,member; + def elements = fullname.split("[\\(\\)]"); + ds = elements[0]; + member = elements.size()>1? 
elements[1] : ""; + return [ds, member]; } +/** + * Extract the directory and the file + * from the fullname of a zFS file + * For instance: /var/test/file.txt --> [/var/test, file.txt] + */ +def extractDirectoryAndFile(String fullname) { + Path filePath = Paths.get(fullname); + String file = filePath.getFileName().toString(); + String directory = filePath.getParent(); + return [directory, file]; +} + + /** * run process */ def runProcess(ArrayList cmd, File dir){ - if (props.verbose && props.verbose.toBoolean()) println " Executing $cmd: " - StringBuffer response = new StringBuffer() - StringBuffer error = new StringBuffer() - - // execute cmd - def p = cmd.execute(null, dir) - - p.waitForProcessOutput(response, error) - if(response) println(response.toString()) - - def rc = p.exitValue(); - if(rc!=0){ - println("*! Error executing $cmd \n" + error.toString()) - //System.exit(1) - } - return rc + if (props.verbose && props.verbose.toBoolean()) println " Executing $cmd: " + StringBuffer response = new StringBuffer() + StringBuffer error = new StringBuffer() + + // execute cmd + def p = cmd.execute(null, dir) + + p.waitForProcessOutput(response, error) + if(response) println(response.toString()) + + def rc = p.exitValue(); + if(rc!=0){ + println("*! Error executing $cmd \n" + error.toString()) + //System.exit(1) + } + return rc } /** * read cliArgs */ def parseInput(String[] cliArgs){ - def cli = new CliBuilder(usage: "PackageBuildOutputs.groovy [options]") - // required packaging options - cli.w(longOpt:'workDir', args:1, argName:'dir', 'Absolute path to the DBB build output directory') - cli.properties(longOpt:'packagingPropertiesFile', args:1, argName:'packagingPropertiesFile', 'Path of a property file containing application specific packaging details.') - - // optional packaging options - cli.d(longOpt:'deployTypes', args:1, argName:'deployTypes','Comma-seperated list of deployTypes to filter on the scope of the tar file. 
(Optional)') - cli.t(longOpt:'tarFileName', args:1, argName:'filename', 'Name of the package tar file. (Optional unless using --buildReportOrder or --buildReportOrderFile)') - cli.il(longOpt:'includeLogs', args:1, argName:'includeLogs', 'Comma-separated list of files/patterns from the USS build workspace. (Optional)') - cli.ae(longOpt:'addExtension', 'Flag to add the deploy type extension to the member in the package tar file. (Optional)') - - // Artifact repository options :: - cli.p(longOpt:'publish', 'Flag to indicate package upload to the provided Artifact Repository server. (Optional)') - cli.v(longOpt:'versionName', args:1, argName:'versionName', 'Name of the version/package on the Artifact repository server. (Optional)') - - // Artifact repository info - cli.au(longOpt:'artifactRepositoryUrl', args:1, argName:'url', 'URL to the Artifact repository server. (Optional)') - cli.ar(longOpt:'artifactRepositoryName', args:1, argName:'repoName', 'Artifact repository name to store the build. (Optional)') - cli.ad(longOpt:'artifactRepositoryDirectory', args:1, argName:'repoDirectory', 'Directory path in the repository to store the build . (Optional)') - cli.aU(longOpt:'artifactRepositoryUser', args:1, argName:'user', 'User to connect to the Artifact repository server. (Optional)') - cli.aP(longOpt:'artifactRepositoryPassword', args:1, argName:'password', 'Password to connect to the Artifact repository server. (Optional)') - cli.aprop(longOpt:'artifactRepositoryPropertyFile', args:1, argName:'propertyFile', 'Path of a property file containing application specific artifact repository details. (Optional) ** (Deprecated)') - - // old prop option (deprecated) - cli.artifactory(longOpt:'artifact repositoryPropertiesFile', args:1, argName:'Artifactory repositoryPropertiesFile', 'Path of a property file containing application specific Artifactory repository details. 
(Optional) ** (Deprecated)') - cli.prop(longOpt:'propertyFile', args:1, argName:'propertyFile', 'Path of a property file containing application specific artifact repository details. (Optional) ** (Deprecated)') - - cli.verb(longOpt:'verbose', 'Flag to provide more log output. (Optional)') - - // multiple build reports - cli.boFile(longOpt:'buildReportOrderFile', args:1, argName:'buildReportOrderFile', 'A file that lists build reports in order of processing') - cli.bO(longOpt:'buildReportOrder', args:1, argName:'buildReportOrder', 'List of build reports in order of processing ') - - - cli.h(longOpt:'help', 'Prints this message') - def opts = cli.parse(cliArgs) - if (opts.h) { // if help option used, print usage and exit - cli.usage() - System.exit(2) - } - - def props = new Properties() - - // read properties file - if (opts.properties){ - def propertiesFile = new File(opts.properties) - if (propertiesFile.exists()){ - props.packagingPropertiesFile = opts.properties - propertiesFile.withInputStream { props.load(it) } - } - } else { // read default sample properties file shipped with the script - def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent - def defaultPackagePropFile = new File("$scriptDir/packageBuildOutputs.properties") - if (defaultPackagePropFile.exists()){ - props.packagingPropertiesFile = "$scriptDir/packageBuildOutputs.properties" - defaultPackagePropFile.withInputStream { props.load(it) } - } - } - - // set command line arguments - if (opts.w) props.workDir = opts.w - if (opts.d) props.deployTypeFilter = opts.d - if (opts.t) props.tarFileName = opts.t - if (opts.il) props.includeLogs = opts.il - props.addExtension = (opts.ae) ? 'true' : 'false' - - props.verbose = (opts.verb) ? 'true' : 'false' - - // Optional Artifact repository info to deploy package - - if (opts.v) props.versionName = opts.v - props.publish = (opts.p) ? 
'true' : 'false' - - // read of artifact repository file - if (opts.aprop){ - def propertyFile = new File(opts.aprop) - if (propertyFile.exists()){ - propertyFile.withInputStream { props.load(it) } - } - } - - // read the artifact repo cli options, which take precedence over - // the properties file - - if (opts.aU) props.'artifactRepository.user' = opts.aU - if (opts.aP) props.'artifactRepository.password' = opts.aP - if (opts.au) props.'artifactRepository.url' = opts.au - if (opts.ar) props.'artifactRepository.repo' = opts.ar - if (opts.ad) props.'artifactRepository.directory' = opts.ad - - - - // ** Deprecated ** Optional artifact repository properties - // properties require to follow new naming conventions artifactRepository.* - if (opts.artifactory){ - def propertyFile = new File(opts.artifactory) - if (propertyFile.exists()){ - propertyFile.withInputStream { props.load(it) } - } - } - - // ** Deprecated ** Read of artifact repository properties - if (opts.prop){ - def propertyFile = new File(opts.prop) - if (propertyFile.exists()){ - propertyFile.withInputStream { props.load(it) } - } - } - - //add any build reports from the file first, then add any from a CLI after. - //if no file or CLI, go to default build report - def buildReports = [] - if (opts.boFile) { - new File (opts.boFile).eachLine { line -> - buildReports.add(line) - } - - if(opts.t == false) { - println("*! Error: tarFilename is only optional when no build report order is specified") - System.exit(3) - } - - } - if (opts.bO) { - opts.bO.split(',').each{ - buildReports.add(it) - } - if(opts.t == false) { - println("*! 
Error: tarFilename is only optional when no build report order is specified") - System.exit(3) - } - } else if (buildReports.isEmpty()){ - buildReports = [props.workDir + "/BuildReport.json"] - } - props.buildReportOrder = buildReports - - - - // validate required props - try { - assert props.workDir : "Missing property build work directory" - assert props.copyModeMap : "Missing property package.copyModeMap" - - if (props.publish && props.publish.toBoolean()){ - assert props.get("artifactRepository.url") : "Missing artifact repository URL" - assert props.get("artifactRepository.repo") : "Missing artifact repository name" - assert props.get("artifactRepository.user") : "Missing artifact repository Username" - assert props.get("artifactRepository.password") : "Missing artifact repository Password" - } - - } catch (AssertionError e) { - cli.usage() - throw e - } - return props + def cli = new CliBuilder(usage: "PackageBuildOutputs.groovy [options]") + // required packaging options + cli.w(longOpt:'workDir', args:1, argName:'dir', 'Absolute path to the DBB build output directory') + cli.properties(longOpt:'packagingPropertiesFile', args:1, argName:'packagingPropertiesFile', 'Path of a property file containing application specific packaging details.') + + // optional packaging options + cli.d(longOpt:'deployTypes', args:1, argName:'deployTypes','Comma-seperated list of deployTypes to filter on the scope of the tar file. (Optional)') + cli.t(longOpt:'tarFileName', args:1, argName:'filename', 'Name of the package tar file. (Optional unless using --buildReportOrder or --buildReportOrderFile)') + cli.il(longOpt:'includeLogs', args:1, argName:'includeLogs', 'Comma-separated list of files/patterns from the USS build workspace. (Optional)') + cli.ae(longOpt:'addExtension', 'Flag to add the deploy type extension to the member in the package tar file. 
(Optional)') + + // Artifact repository options :: + cli.p(longOpt:'publish', 'Flag to indicate package upload to the provided Artifact Repository server. (Optional)') + cli.v(longOpt:'versionName', args:1, argName:'versionName', 'Name of the version/package on the Artifact repository server. (Optional)') + + // Artifact repository info + cli.au(longOpt:'artifactRepositoryUrl', args:1, argName:'url', 'URL to the Artifact repository server. (Optional)') + cli.ar(longOpt:'artifactRepositoryName', args:1, argName:'repoName', 'Artifact repository name to store the build. (Optional)') + cli.ad(longOpt:'artifactRepositoryDirectory', args:1, argName:'repoDirectory', 'Directory path in the repository to store the build . (Optional)') + cli.aU(longOpt:'artifactRepositoryUser', args:1, argName:'user', 'User to connect to the Artifact repository server. (Optional)') + cli.aP(longOpt:'artifactRepositoryPassword', args:1, argName:'password', 'Password to connect to the Artifact repository server. (Optional)') + cli.aprop(longOpt:'artifactRepositoryPropertyFile', args:1, argName:'propertyFile', 'Path of a property file containing application specific artifact repository details. (Optional) ** (Deprecated)') + + // old prop option (deprecated) + cli.artifactory(longOpt:'artifact repositoryPropertiesFile', args:1, argName:'Artifactory repositoryPropertiesFile', 'Path of a property file containing application specific Artifactory repository details. (Optional) ** (Deprecated)') + cli.prop(longOpt:'propertyFile', args:1, argName:'propertyFile', 'Path of a property file containing application specific artifact repository details. (Optional) ** (Deprecated)') + + cli.verb(longOpt:'verbose', 'Flag to provide more log output. 
(Optional)') + + // multiple build reports + cli.boFile(longOpt:'buildReportOrderFile', args:1, argName:'buildReportOrderFile', 'A file that lists build reports in order of processing') + cli.bO(longOpt:'buildReportOrder', args:1, argName:'buildReportOrder', 'List of build reports in order of processing ') + + + cli.h(longOpt:'help', 'Prints this message') + def opts = cli.parse(cliArgs) + if (opts.h) { // if help option used, print usage and exit + cli.usage() + System.exit(2) + } + + def props = new Properties() + + // read properties file + if (opts.properties){ + def propertiesFile = new File(opts.properties) + if (propertiesFile.exists()){ + props.packagingPropertiesFile = opts.properties + propertiesFile.withInputStream { props.load(it) } + } + } else { // read default sample properties file shipped with the script + def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent + def defaultPackagePropFile = new File("$scriptDir/packageBuildOutputs.properties") + if (defaultPackagePropFile.exists()){ + props.packagingPropertiesFile = "$scriptDir/packageBuildOutputs.properties" + defaultPackagePropFile.withInputStream { props.load(it) } + } + } + + // set command line arguments + if (opts.w) props.workDir = opts.w + if (opts.d) props.deployTypeFilter = opts.d + if (opts.t) props.tarFileName = opts.t + if (opts.il) props.includeLogs = opts.il + props.addExtension = (opts.ae) ? 'true' : 'false' + + props.verbose = (opts.verb) ? 'true' : 'false' + + // Optional Artifact repository info to deploy package + + if (opts.v) props.versionName = opts.v + props.publish = (opts.p) ? 
'true' : 'false' + + // read of artifact repository file + if (opts.aprop){ + def propertyFile = new File(opts.aprop) + if (propertyFile.exists()){ + propertyFile.withInputStream { props.load(it) } + } + } + + // read the artifact repo cli options, which take precedence over + // the properties file + + if (opts.aU) props.'artifactRepository.user' = opts.aU + if (opts.aP) props.'artifactRepository.password' = opts.aP + if (opts.au) props.'artifactRepository.url' = opts.au + if (opts.ar) props.'artifactRepository.repo' = opts.ar + if (opts.ad) props.'artifactRepository.directory' = opts.ad + + + + // ** Deprecated ** Optional artifact repository properties + // properties require to follow new naming conventions artifactRepository.* + if (opts.artifactory){ + def propertyFile = new File(opts.artifactory) + if (propertyFile.exists()){ + propertyFile.withInputStream { props.load(it) } + } + } + + // ** Deprecated ** Read of artifact repository properties + if (opts.prop){ + def propertyFile = new File(opts.prop) + if (propertyFile.exists()){ + propertyFile.withInputStream { props.load(it) } + } + } + + //add any build reports from the file first, then add any from a CLI after. + //if no file or CLI, go to default build report + def buildReports = [] + if (opts.boFile) { + new File (opts.boFile).eachLine { line -> + buildReports.add(line) + } + + if(opts.t == false) { + println("*! Error: tarFilename is only optional when no build report order is specified") + System.exit(3) + } + + } + if (opts.bO) { + opts.bO.split(',').each{ + buildReports.add(it) + } + if(opts.t == false) { + println("*! 
Error: tarFilename is only optional when no build report order is specified") + System.exit(3) + } + } else if (buildReports.isEmpty()){ + buildReports = [props.workDir + "/BuildReport.json"] + } + props.buildReportOrder = buildReports + + + + // validate required props + try { + assert props.workDir : "Missing property build work directory" + assert props.copyModeMap : "Missing property package.copyModeMap" + + if (props.publish && props.publish.toBoolean()){ + assert props.get("artifactRepository.url") : "Missing artifact repository URL" + assert props.get("artifactRepository.repo") : "Missing artifact repository name" + assert props.get("artifactRepository.user") : "Missing artifact repository Username" + assert props.get("artifactRepository.password") : "Missing artifact repository Password" + } + + } catch (AssertionError e) { + cli.usage() + throw e + } + return props } /* * relativizePath - converts an absolute path to a relative path from the workspace directory */ def relativizePath(String path) { - if (!path.startsWith('/')) - return path - String relPath = new File(props.workDir).toURI().relativize(new File(path.trim()).toURI()).getPath() - // Directories have '/' added to the end. Lets remove it. - if (relPath.endsWith('/')) - relPath = relPath.take(relPath.length()-1) - return relPath + if (!path.startsWith('/')) + return path + String relPath = new File(props.workDir).toURI().relativize(new File(path.trim()).toURI()).getPath() + // Directories have '/' added to the end. Lets remove it. + if (relPath.endsWith('/')) + relPath = relPath.take(relPath.length()-1) + return relPath } /* @@ -516,35 +606,35 @@ def relativizePath(String path) { * and a deployType. Instances of this class are used in the main Map object to represent unique artifacts. 
*/ class DeployableArtifact { - private final String file; - private final String deployType; - - DeployableArtifact(String file, String deployType) { - this.file = file; - this.deployType = deployType; - } - - @Override - public int hashCode() { - String concatenation = file + "." + deployType; - return concatenation.hashCode(); - } - - public boolean equals(DeployableArtifact other) { - return other.file.equals(file) & other.deployType.equals(deployType); - } - - @Override - public boolean equals(Object other) { - if (other instanceof DeployableArtifact) { - return equals((DeployableArtifact) other) - } else { - return false; - } - } - - @Override - public String toString() { - return file + "." + deployType; - } + private final String file; + private final String deployType; + + DeployableArtifact(String file, String deployType) { + this.file = file; + this.deployType = deployType; + } + + @Override + public int hashCode() { + String concatenation = file + "." + deployType; + return concatenation.hashCode(); + } + + public boolean equals(DeployableArtifact other) { + return other.file.equals(file) & other.deployType.equals(deployType); + } + + @Override + public boolean equals(Object other) { + if (other instanceof DeployableArtifact) { + return equals((DeployableArtifact) other) + } else { + return false; + } + } + + @Override + public String toString() { + return file + "." + deployType; + } } \ No newline at end of file diff --git a/Pipeline/PackageBuildOutputs/README.md b/Pipeline/PackageBuildOutputs/README.md index 0c038e61..dce90ad3 100644 --- a/Pipeline/PackageBuildOutputs/README.md +++ b/Pipeline/PackageBuildOutputs/README.md @@ -11,6 +11,9 @@ This sample Groovy script to package build outputs: - Extracts information about the build outputs from the Dependency Based Build (DBB) `BuildReport.json`. The script is able to take a single DBB build report or multiple build reports to build a cumulative package across multiple incremental builds. 
- Copies outputs to a temporary directory on Unix System Services and creates a tar file based on the temporary directory. + +The support for zFS files in the packaging process is performed through the use of an USS_RECORD type record in the DBB BuildReport. + ## Package Build Outputs Process - High-level Processing Flow This section provides a more detailed explanation of how the PackageBuildOutputs script works and what it does.