diff --git a/docs/contributing/Target Framework Strategy.md b/docs/contributing/Target Framework Strategy.md index a6545072f1920..7395188691ce2 100644 --- a/docs/contributing/Target Framework Strategy.md +++ b/docs/contributing/Target Framework Strategy.md @@ -26,6 +26,7 @@ Projects in our repository should include the following values in `` setting. Instead our repo uses the above values and when inside source build or VMR our properties are initialized with arcade properties. @@ -58,7 +59,7 @@ This problem primarily comes from our use of polyfill APIs. To avoid this we emp This comes up in two forms: -### Pattern for types +### Pattern for types When creating a polyfill for a type use the `#if !NET...` to declare the type and in the `#else` use a `TypeForwardedTo` for the actual type. @@ -99,6 +100,13 @@ When creating a polyfill for an extension use the `#if NET...` to declare the ex #endif ``` +## Transitioning to a new .NET SDK +As the .NET team approaches releasing a new .NET SDK, the Roslyn team will begin using preview versions of that SDK in our build. This will often lead to test failures in our CI system due to underlying behavior changes in the runtime. These failures will often not show up when running in Visual Studio due to the way the runtime for the test runner is chosen. +To ensure we have a simple developer environment, such projects should be moved to the `$(NetRoslynNext)` target framework. That ensures the new runtime is loaded when running tests locally. + +When the .NET SDK RTMs and Roslyn adopts it, all occurrences of `$(NetRoslynNext)` will be moved to `$(NetRoslyn)`. + +**DO NOT** include both `$(NetRoslyn)` and `$(NetRoslynNext)` in the same project unless there is a very specific reason that both sets of tests are adding value. The most common case is that the runtime has changed behavior and we simply need to update our baselines to match it. Adding extra TFMs for this just increases test time for very little gain.
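To make the transition guidance above concrete, here is a minimal sketch of what moving a test project to the "next" framework might look like. Only the `$(NetRoslynNext)` and `$(NetRoslyn)` property names come from the document; the project file shape, the SDK attribute, and the `net472` entry are illustrative assumptions rather than actual Roslyn project contents.

```xml
<!-- Hypothetical test project; the layout and extra TFMs are illustrative only. -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <!-- While the new SDK is in preview, target the "next" framework so local test
         runs load the new runtime and match CI behavior. -->
    <TargetFrameworks>$(NetRoslynNext);net472</TargetFrameworks>
    <!-- Once the SDK RTMs and Roslyn adopts it, this moves back to $(NetRoslyn).
         Avoid listing $(NetRoslyn) and $(NetRoslynNext) together unless both test
         runs add value. -->
  </PropertyGroup>
</Project>
```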
diff --git a/eng/SourceBuildPrebuiltBaseline.xml b/eng/SourceBuildPrebuiltBaseline.xml index 592d3954059e8..8451ebe80c273 100644 --- a/eng/SourceBuildPrebuiltBaseline.xml +++ b/eng/SourceBuildPrebuiltBaseline.xml @@ -13,13 +13,15 @@ - + - + diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 4963ae650436c..369ff10342899 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -110,14 +110,14 @@ - + https://github.com/dotnet/arcade - 67d23f4ba1813b315e7e33c71d18b63475f5c5f8 + 15eea424d3b2dd25a5c0b10e8adc8aeed50129a1 - + https://github.com/dotnet/arcade - 67d23f4ba1813b315e7e33c71d18b63475f5c5f8 + 15eea424d3b2dd25a5c0b10e8adc8aeed50129a1 @@ -144,9 +144,9 @@ https://github.com/dotnet/roslyn 5d10d428050c0d6afef30a072c4ae68776621877 - + https://github.com/dotnet/arcade - 67d23f4ba1813b315e7e33c71d18b63475f5c5f8 + 15eea424d3b2dd25a5c0b10e8adc8aeed50129a1 https://github.com/dotnet/roslyn-analyzers diff --git a/eng/common/build.cmd b/eng/common/build.cmd new file mode 100644 index 0000000000000..99daf368abae7 --- /dev/null +++ b/eng/common/build.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*" +exit /b %ErrorLevel% diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index 33a6f2d0e2481..438f9920c43e4 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -19,6 +19,7 @@ Param( [switch] $pack, [switch] $publish, [switch] $clean, + [switch][Alias('pb')]$productBuild, [switch][Alias('bl')]$binaryLog, [switch][Alias('nobl')]$excludeCIBinarylog, [switch] $ci, @@ -58,6 +59,7 @@ function Print-Usage() { Write-Host " -sign Sign build outputs" Write-Host " -publish Publish artifacts (e.g. symbols)" Write-Host " -clean Clean the solution" + Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)" Write-Host "" Write-Host "Advanced settings:" @@ -120,6 +122,7 @@ function Build { /p:Deploy=$deploy ` /p:Test=$test ` /p:Pack=$pack ` + /p:DotNetBuildRepo=$productBuild ` /p:IntegrationTest=$integrationTest ` /p:PerformanceTest=$performanceTest ` /p:Sign=$sign ` diff --git a/eng/common/build.sh b/eng/common/build.sh index 50af40cdd2ce6..ac1ee8620cd2a 100755 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -22,6 +22,9 @@ usage() echo " --sourceBuild Source-build the solution (short: -sb)" echo " Will additionally trigger the following actions: --restore, --build, --pack" echo " If --configuration is not set explicitly, will also set it to 'Release'" + echo " --productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)" + echo " Will additionally trigger the following actions: --restore, --build, --pack" + echo " If --configuration is not set explicitly, will also set it to 'Release'" echo " --rebuild Rebuild solution" echo " --test Run all unit tests in the solution (short: -t)" echo " --integrationTest Run all integration tests in the solution" @@ -59,6 +62,7 @@ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" restore=false build=false source_build=false +product_build=false rebuild=false test=false integration_test=false @@ -105,7 +109,7 @@ while [[ $# > 0 ]]; do -binarylog|-bl) binary_log=true ;; - -excludeCIBinarylog|-nobl) + -excludecibinarylog|-nobl) exclude_ci_binary_log=true ;; -pipelineslog|-pl) @@ -126,6 +130,13 @@ while [[ $# > 0 ]]; do -sourcebuild|-sb) build=true source_build=true + product_build=true + restore=true + pack=true + ;; + -productBuild|-pb) + build=true 
+ product_build=true restore=true pack=true ;; @@ -219,7 +230,9 @@ function Build { /p:RepoRoot="$repo_root" \ /p:Restore=$restore \ /p:Build=$build \ + /p:DotNetBuildRepo=$product_build \ /p:ArcadeBuildFromSource=$source_build \ + /p:DotNetBuildSourceOnly=$source_build \ /p:Rebuild=$rebuild \ /p:Test=$test \ /p:Pack=$pack \ diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml new file mode 100644 index 0000000000000..dc3bd560a50e2 --- /dev/null +++ b/eng/common/core-templates/job/job.yml @@ -0,0 +1,266 @@ +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + templateContext: {} + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + # publishing defaults + artifacts: '' + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enablePublishUsingPipelines: false + enableBuildRetry: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + preSteps: [] + artifactPublishSteps: [] + runAsPublic: false + +# Sbom related params + enableSbom: true + PackageVersion: 9.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +# 1es specific parameters + is1ESPipeline: '' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + + variables: + - ${{ if ne(parameters.enableTelemetry, 'false') }}: + - name: DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: + - name: EnableRichCodeNavigation + value: 'true' + # Retry signature validation up to three times, waiting 2 seconds between attempts. 
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. + # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)' + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} + + - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: + - task: RichCodeNavIndexer@0 + displayName: RichCodeNav Upload + inputs: + languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} + environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }} + richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin + uploadRichNavArtifacts: ${{ 
coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} + continueOnError: true + + - template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + PackageVersion: ${{ parameters.packageVersion}} + BuildDropPath: ${{ parameters.buildDropPath }} + IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + publishArtifacts: false + + # Publish test results + - ${{ if and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')) }}: + - ${{ if eq(parameters.testResultsFormat, 'xunit') }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if eq(parameters.testResultsFormat, 'vstest') }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + # gather artifacts + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + 
SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - task: CopyFiles@2 + displayName: Gather logs for publish to artifacts + inputs: + SourceFolder: 'artifacts/log' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - task: CopyFiles@2 + displayName: Gather logs for publish to artifacts + inputs: + SourceFolder: 'artifacts/log/$(_BuildConfig)' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - task: CopyFiles@2 + displayName: Gather buildconfiguration for build retry + inputs: + SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration' + + - ${{ each step in parameters.artifactPublishSteps }}: + - ${{ step }} diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml new file mode 100644 index 0000000000000..00feec8ebbc3a --- /dev/null +++ b/eng/common/core-templates/job/onelocbuild.yml @@ -0,0 +1,121 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + + SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + is1ESPipeline: '' +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish Localization Files + pathToPublish: '$(Build.ArtifactStagingDirectory)/loc' + publishLocation: Container + artifactName: Loc + condition: ${{ parameters.condition }} + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish LocProject.json + pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/' + publishLocation: Container + artifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml new file mode 100644 index 0000000000000..8fe9299542c53 --- /dev/null +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -0,0 +1,172 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ runAsPublic: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishUsingPipelines: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishAssetsImmediately: false + + artifactsPublishingAdditionalParameters: '' + + signingValidationAdditionalParameters: '' + + is1ESPipeline: '' + +jobs: +- job: Asset_Registry_Publish + + dependsOn: ${{ parameters.dependsOn }} + timeoutInMinutes: 150 + + ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + displayName: Publish Assets + ${{ else }}: + displayName: Publish to Build Asset Registry + + variables: + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: Publish-Build-Assets + - group: AzureDevOps-Artifact-Feeds-Pats + - name: runCodesignValidationInjection + value: false + # unconditional - needed for logs publishing (redactor tool version) + - template: /eng/common/core-templates/post-build/common-variables.yml + + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - checkout: self + fetchDepth: 3 + clean: true + + - task: DownloadBuildArtifacts@0 + displayName: Download artifact + inputs: + artifactName: AssetManifests + downloadPath: '$(Build.StagingDirectory)/Download' + checkDownloadedFiles: true + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: NuGetAuthenticate@1 + + - task: PowerShell@2 + displayName: Publish Build Assets + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:BuildAssetRegistryToken=$(MaestroAccessToken) + /p:MaestroApiEndpoint=https://maestro.dot.net + /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} + /p:OfficialBuildId=$(Build.BuildNumber) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: powershell@2 + displayName: Create ReleaseConfigs Artifact + inputs: + targetType: inline + script: | + New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force + $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" + Add-Content -Path $filePath -Value $(BARBuildId) + Add-Content -Path $filePath -Value "$(DefaultChannels)" + Add-Content -Path $filePath -Value $(IsStableBuild) + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish ReleaseConfigs Artifact + pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs' + publishLocation: Container + artifactName: ReleaseConfigs + + - task: powershell@2 + displayName: Check if SymbolPublishingExclusionsFile.txt exists + inputs: + targetType: inline + script: | + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + if(Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" + } + else{ + Write-Host "Symbols Exclusion file does not exist" + Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" + } + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish SymbolPublishingExclusionsFile Artifact + condition: eq(variables['SymbolExclusionFile'], 'true') + pathToPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' + publishLocation: Container + artifactName: ReleaseConfigs + + - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters 
'${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml new file mode 100644 index 0000000000000..c0ce4b3c86186 --- /dev/null +++ b/eng/common/core-templates/job/source-build.yml @@ -0,0 +1,80 @@ +parameters: + # This template adds arcade-powered source-build to CI. The template produces a server job with a + # default ID 'Source_Build_Complete' to put in a dependency list if necessary. + + # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. + jobNamePrefix: 'Source_Build' + + # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for + # managed-only repositories. This is an object with these properties: + # + # name: '' + # The name of the job. This is included in the job ID. + # targetRID: '' + # The name of the target RID to use, instead of the one auto-detected by Arcade. + # nonPortable: false + # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than + # linux-x64), and compiling against distro-provided packages rather than portable ones. + # skipPublishValidation: false + # Disables publishing validation. By default, a check is performed to ensure no packages are + # published by source-build. + # container: '' + # A container to use. Runs in docker. + # pool: {} + # A pool to use. Runs directly on an agent. + # buildScript: '' + # Specifies the build script to invoke to perform the build in the repo. The default + # './build.sh' should work for typical Arcade repositories, but this is customizable for + # difficult situations. + # jobProperties: {} + # A list of job properties to inject at the top level, for potential extensibility beyond + # container and pool. + platform: {} + + is1ESPipeline: '' + +jobs: +- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} + displayName: Source-Build (${{ parameters.platform.name }}) + + ${{ each property in parameters.platform.jobProperties }}: + ${{ property.key }}: ${{ property.value }} + + ${{ if ne(parameters.platform.container, '') }}: + container: ${{ parameters.platform.container }} + + ${{ if eq(parameters.platform.pool, '') }}: + # The default VM host AzDO pool. This should be capable of running Docker containers: almost all + # source-build builds run in Docker, including the default managed platform. 
+ # /eng/common/core-templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic + ${{ if eq(parameters.is1ESPipeline, 'true') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals build.ubuntu.2004.amd64 + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + image: 1es-mariner-2 + os: linux + ${{ else }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] + demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] + demands: ImageOverride -equals Build.Ubuntu.2204.Amd64 + ${{ if ne(parameters.platform.pool, '') }}: + pool: ${{ parameters.platform.pool }} + + workspace: + clean: all + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + platform: ${{ parameters.platform }} diff --git a/eng/common/core-templates/job/source-index-stage1.yml b/eng/common/core-templates/job/source-index-stage1.yml new file mode 100644 index 0000000000000..f1938eec10203 --- /dev/null +++ b/eng/common/core-templates/job/source-index-stage1.yml @@ -0,0 +1,91 @@ +parameters: + runAsPublic: false + sourceIndexUploadPackageVersion: 2.0.0-20240502.12 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2 + sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + is1ESPipeline: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: SourceIndexUploadPackageVersion + value: ${{ parameters.sourceIndexUploadPackageVersion }} + - name: SourceIndexProcessBinlogPackageVersion + value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }} + - name: SourceIndexPackageSource + value: ${{ parameters.sourceIndexPackageSource }} + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + image: windows.vs2022.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - task: UseDotNet@2 + displayName: Use .NET 8 SDK + inputs: + packageType: sdk + version: 8.0.x + installationPath: $(Agent.TempDirectory)/dotnet + workingDirectory: $(Agent.TempDirectory) + + - script: | + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools + displayName: Download Tools + # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
+ workingDirectory: $(Agent.TempDirectory) + + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output + displayName: Process Binlog into indexable sln + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - task: AzureCLI@2 + displayName: Get stage 1 auth token + inputs: + azureSubscription: 'SourceDotNet Stage1 Publish' + addSpnToEnvironment: true + scriptType: 'ps' + scriptLocation: 'inlineScript' + inlineScript: | + echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId" + echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken" + echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId" + + - script: | + echo "Client ID: $(ARM_CLIENT_ID)" + echo "ID Token: $(ARM_ID_TOKEN)" + echo "Tenant ID: $(ARM_TENANT_ID)" + az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN) + displayName: "Login to Azure" + + - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1 + displayName: Upload stage1 artifacts to source index \ No newline at end of file diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml new file mode 100644 index 0000000000000..f2144252cc65c --- /dev/null +++ b/eng/common/core-templates/jobs/codeql-build.yml @@ -0,0 +1,33 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + is1ESPipeline: '' + +jobs: +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishTestResults: false + enablePublishBuildAssets: false + enablePublishUsingPipelines: false + enableTelemetry: true + + variables: + - group: Publish-Build-Assets + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. 
+ - name: DefaultGuardianVersion + value: 0.109.0 + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + + jobs: ${{ parameters.jobs }} + diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml new file mode 100644 index 0000000000000..ea69be4341c62 --- /dev/null +++ b/eng/common/core-templates/jobs/jobs.yml @@ -0,0 +1,119 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: Enable publishing using release pipelines + enablePublishUsingPipelines: false + + # Optional: Enable running the source-build jobs to build repo from source + enableSourceBuild: false + + # Optional: Parameters for source-build template. + # See /eng/common/core-templates/jobs/source-build.yml for options + sourceBuildParameters: [] + + graphFileGeneration: + # Optional: Enable generating the graph files at the end of the build + enabled: false + # Optional: Include toolset dependencies in the generated graph files + includeToolset: false + + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + + # Optional: Override automatically derived dependsOn value for "publish build assets" job + publishBuildAssetsDependsOn: '' + + # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. + publishAssetsImmediately: false + + # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) + artifactsPublishingAdditionalParameters: '' + signingValidationAdditionalParameters: '' + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. + runAsPublic: false + + enableSourceIndex: false + sourceIndexParams: {} + + artifacts: {} + is1ESPipeline: '' + +# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, +# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
+ +jobs: +- ${{ each job in parameters.jobs }}: + - ${{ if eq(parameters.is1ESPipeline, 'true') }}: + - template: /eng/common/templates-official/job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + + - ${{ else }}: + - template: /eng/common/templates/job/job.yml + parameters: + # pass along parameters + ${{ each parameter in parameters }}: + ${{ if ne(parameter.key, 'jobs') }}: + ${{ parameter.key }}: ${{ parameter.value }} + + # pass along job properties + ${{ each property in job }}: + ${{ if ne(property.key, 'job') }}: + ${{ property.key }}: ${{ property.value }} + + name: ${{ job.job }} + +- ${{ if eq(parameters.enableSourceBuild, true) }}: + - template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + allCompletedJobId: Source_Build_Complete + ${{ each parameter in parameters.sourceBuildParameters }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if eq(parameters.enableSourceIndex, 'true') }}: + - template: ../job/source-index-stage1.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + runAsPublic: ${{ parameters.runAsPublic }} + ${{ each parameter in parameters.sourceIndexParams }}: + ${{ parameter.key }}: ${{ parameter.value }} + +- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - template: ../job/publish-build-assets.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + continueOnError: ${{ parameters.continueOnError }} + dependsOn: + - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.publishBuildAssetsDependsOn }}: + - ${{ job.job }} + - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: + - ${{ each job in parameters.jobs }}: + - ${{ job.job }} + - ${{ if eq(parameters.enableSourceBuild, true) }}: + - Source_Build_Complete + + runAsPublic: ${{ parameters.runAsPublic }} + publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} + publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} + artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} + signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml new file mode 100644 index 0000000000000..d8e5d00852268 --- /dev/null +++ b/eng/common/core-templates/jobs/source-build.yml @@ -0,0 +1,50 @@ +parameters: + # This template adds arcade-powered source-build to CI. A job is created for each platform, as + # well as an optional server job that completes when all platform jobs complete. + + # The name of the "join" job for all source-build platforms. If set to empty string, the job is + # not included. 
Existing repo pipelines can use this job depend on all source-build jobs + # completing without maintaining a separate list of every single job ID: just depend on this one + # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. + allCompletedJobId: '' + + # See /eng/common/core-templates/job/source-build.yml + jobNamePrefix: 'Source_Build' + + # This is the default platform provided by Arcade, intended for use by a managed-only repo. + defaultManagedPlatform: + name: 'Managed' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9' + + # Defines the platforms on which to run build jobs. One job is created for each platform, and the + # object in this array is sent to the job template as 'platform'. If no platforms are specified, + # one job runs on 'defaultManagedPlatform'. + platforms: [] + + is1ESPipeline: '' + +jobs: + +- ${{ if ne(parameters.allCompletedJobId, '') }}: + - job: ${{ parameters.allCompletedJobId }} + displayName: Source-Build Complete + pool: server + dependsOn: + - ${{ each platform in parameters.platforms }}: + - ${{ parameters.jobNamePrefix }}_${{ platform.name }} + - ${{ if eq(length(parameters.platforms), 0) }}: + - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} + +- ${{ each platform in parameters.platforms }}: + - template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ platform }} + +- ${{ if eq(length(parameters.platforms), 0) }}: + - template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobNamePrefix: ${{ parameters.jobNamePrefix }} + platform: ${{ parameters.defaultManagedPlatform }} diff --git a/eng/common/core-templates/post-build/common-variables.yml b/eng/common/core-templates/post-build/common-variables.yml new file mode 100644 index 0000000000000..b9ede10bf099a --- /dev/null +++ b/eng/common/core-templates/post-build/common-variables.yml @@ -0,0 +1,24 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro.dot.net" + - name: MaestroApiAccessToken + value: $(MaestroAccessToken) + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + - name: BinlogToolVersion + value: 1.0.11 + + - name: runCodesignValidationInjection + value: false diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml new file mode 100644 index 0000000000000..865bc1ecb4f0f --- /dev/null +++ b/eng/common/core-templates/post-build/post-build.yml @@ -0,0 +1,314 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? 
+ type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + + - name: is1ESPipeline + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. 
+ - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + ${{ else }}: + name: NetCore1ESPool-Publishing-Internal + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: NuGetAuthenticate@1 + + - task: PowerShell@2 + displayName: Publish Using Darc + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' + -MaestroToken '$(MaestroApiAccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml new file mode 100644 index 0000000000000..8d56b5726793f --- /dev/null +++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml @@ -0,0 +1,74 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + is1ESPipeline: '' + +steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: PowerShell@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + targetType: inline + pwsh: true + script: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" + + $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' + $apiHeaders.Add('Accept', 'application/json') + $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") + + $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } + + $BarId = $Env:BARBuildId + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + MAESTRO_API_TOKEN: $(MaestroApiAccessToken) + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/eng/common/core-templates/post-build/trigger-subscription.yml b/eng/common/core-templates/post-build/trigger-subscription.yml new file mode 100644 index 0000000000000..da669030daf6e --- /dev/null +++ b/eng/common/core-templates/post-build/trigger-subscription.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- task: PowerShell@2 + displayName: Triggering subscriptions + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1 + arguments: -SourceRepo $(Build.Repository.Uri) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/core-templates/steps/add-build-to-channel.yml b/eng/common/core-templates/steps/add-build-to-channel.yml new file mode 100644 index 0000000000000..f67a210d62f3e --- /dev/null +++ b/eng/common/core-templates/steps/add-build-to-channel.yml @@ -0,0 +1,13 @@ +parameters: + ChannelId: 0 + +steps: +- 
task: PowerShell@2 + displayName: Add Build to Channel + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 + arguments: -BuildId $(BARBuildId) + -ChannelId ${{ parameters.ChannelId }} + -MaestroApiAccessToken $(MaestroApiAccessToken) + -MaestroApiEndPoint $(MaestroApiEndPoint) + -MaestroApiVersion $(MaestroApiVersion) diff --git a/eng/common/core-templates/steps/component-governance.yml b/eng/common/core-templates/steps/component-governance.yml new file mode 100644 index 0000000000000..df449a34c1120 --- /dev/null +++ b/eng/common/core-templates/steps/component-governance.yml @@ -0,0 +1,14 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + is1ESPipeline: false + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml new file mode 100644 index 0000000000000..d938b60e1bb53 --- /dev/null +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -0,0 +1,54 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. + +parameters: + PackageVersion: 9.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + is1ESPipeline: false + # disable publishArtifacts if some other step is publishing the artifacts (like job.yml). 
+ publishArtifacts: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }} + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- ${{ if eq(parameters.publishArtifacts, 'true')}}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + targetPath: '${{ parameters.manifestDirPath }}' + artifactName: $(ARTIFACT_NAME) + diff --git a/eng/common/core-templates/steps/publish-build-artifacts.yml b/eng/common/core-templates/steps/publish-build-artifacts.yml new file mode 100644 index 0000000000000..f24ce346684e6 --- /dev/null +++ b/eng/common/core-templates/steps/publish-build-artifacts.yml @@ -0,0 +1,20 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false +- name: args + type: object + default: {} +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - template: /eng/common/templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ each parameter in parameters.args }}: + ${{ parameter.key }}: ${{ parameter.value }} +- ${{ else }}: + - template: /eng/common/templates-official/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ each parameter in parameters.args }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml new file mode 100644 index 0000000000000..8c5ea77b586d2 --- /dev/null +++ b/eng/common/core-templates/steps/publish-logs.yml @@ -0,0 +1,59 @@ +parameters: + StageLabel: '' + JobLabel: '' + CustomSensitiveDataList: '' + # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed + BinlogToolVersion: '1.0.11' + is1ESPipeline: false + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: PowerShell@2 + displayName: Redact Logs + inputs: 
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 + # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml + # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + # If the file exists - sensitive data for redaction will be sourced from it + # (single entry per line, lines starting with '# ' are considered comments and skipped) + arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' + -BinlogToolVersion ${{parameters.BinlogToolVersion}} + -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + '$(publishing-dnceng-devdiv-code-r-build-re)' + '$(MaestroAccessToken)' + '$(dn-bot-all-orgs-artifact-feeds-rw)' + '$(akams-client-id)' + '$(akams-client-secret)' + '$(microsoft-symbol-server-pat)' + '$(symweb-symbol-server-pat)' + '$(dn-bot-all-orgs-build-rw-code-rw)' + ${{parameters.CustomSensitiveDataList}} + continueOnError: true + condition: always() + +- task: CopyFiles@2 + displayName: Gather post build logs + inputs: + SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' + +- template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish Logs + pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' + publishLocation: Container + artifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml new file mode 100644 index 0000000000000..2efec04dc2c16 --- /dev/null +++ b/eng/common/core-templates/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,20 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: args + type: object + default: {} + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml + parameters: + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} +- ${{ else }}: + - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml + parameters: + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/core-templates/steps/retain-build.yml b/eng/common/core-templates/steps/retain-build.yml new file mode 100644 index 0000000000000..83d97a26a01ff --- /dev/null +++ b/eng/common/core-templates/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to the current running build + BuildId: '' + # Azure devops Organization URI for the build in the https://dev.azure.com/ format. + # Defaults to the organization the current pipeline is running on + AzdoOrgUri: '$(System.CollectionUri)' + # Azure devops project for the build. 
Defaults to the project the current pipeline is running on + AzdoProject: '$(System.TeamProject)' + +steps: + - task: powershell@2 + inputs: + targetType: 'filePath' + filePath: eng/common/retain-build.ps1 + pwsh: true + arguments: > + -AzdoOrgUri: ${{parameters.AzdoOrgUri}} + -AzdoProject ${{parameters.AzdoProject}} + -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} + -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} + displayName: Enable permanent build retention + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/eng/common/core-templates/steps/send-to-helix.yml b/eng/common/core-templates/steps/send-to-helix.yml new file mode 100644 index 0000000000000..68fa739c4ab21 --- /dev/null +++ b/eng/common/core-templates/steps/send-to-helix.yml @@ -0,0 +1,93 @@ +# Please remember to update the documentation if you make changes to these parameters! +parameters: + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY + HelixProjectArguments: '' # optional -- arguments passed to the build command + HelixConfiguration: '' # optional -- additional property attached to a job + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects + WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects + WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true + XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects + XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects + XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner + XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." + IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) + Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + +steps: + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + displayName: ${{ parameters.DisplayNamePrefix }} (Windows) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + 
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} (Unix) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml new file mode 100644 index 0000000000000..bdd725b496f91 --- /dev/null +++ b/eng/common/core-templates/steps/source-build.yml @@ -0,0 +1,134 @@ +parameters: + # This template adds arcade-powered source-build to CI. + + # This is a 'steps' template, and is intended for advanced scenarios where the existing build + # infra has a careful build methodology that must be followed. For example, a repo + # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline + # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to + # GitHub. Using this steps template leaves room for that infra to be included. + + # Defines the platform on which to run the steps. See 'eng/common/core-templates/job/source-build.yml' + # for details. 
The entire object is described in the 'job' template for simplicity, even though + # the usage of the properties on this object is split between the 'job' and 'steps' templates. + platform: {} + is1ESPipeline: false + +steps: +# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) +- script: | + set -x + df -h + + # If building on the internal project, the artifact feeds variable may be available (usually only if needed) + # In that case, call the feed setup script to add internal feeds corresponding to public ones. + # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. + # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those + # changes. + internalRestoreArgs= + if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then + # Temporarily work around https://github.com/dotnet/arcade/issues/7709 + chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh + $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) + internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' + + # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. + # This only works if there is a username/email configured, which won't be the case in most CI runs. + git config --get user.email + if [ $? -ne 0 ]; then + git config user.email dn-bot@microsoft.com + git config user.name dn-bot + fi + fi + + # If building on the internal project, the internal storage variable may be available (usually only if needed) + # In that case, add variables to allow the download of internal runtimes if the specified versions are not found + # in the default public locations. + internalRuntimeDownloadArgs= + if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + fi + + buildConfig=Release + # Check if AzDO substitutes in a build config from a variable, and use it if so. 
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then + buildConfig='$(_BuildConfig)' + fi + + officialBuildArgs= + if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then + officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' + fi + + targetRidArgs= + if [ '${{ parameters.platform.targetRID }}' != '' ]; then + targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' + fi + + runtimeOsArgs= + if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then + runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' + fi + + baseOsArgs= + if [ '${{ parameters.platform.baseOS }}' != '' ]; then + baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + fi + + publishArgs= + if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then + publishArgs='--publish' + fi + + assetManifestFileName=SourceBuild_RidSpecific.xml + if [ '${{ parameters.platform.name }}' != '' ]; then + assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml + fi + + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ + --configuration $buildConfig \ + --restore --build --pack $publishArgs -bl \ + $officialBuildArgs \ + $internalRuntimeDownloadArgs \ + $internalRestoreArgs \ + $targetRidArgs \ + $runtimeOsArgs \ + $baseOsArgs \ + /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ + /p:ArcadeBuildFromSource=true \ + /p:DotNetBuildSourceOnly=true \ + /p:DotNetBuildRepo=true \ + /p:AssetManifestFileName=$assetManifestFileName + displayName: Build + +# Upload build logs for diagnosis. +- task: CopyFiles@2 + displayName: Prepare BuildLogs staging directory + inputs: + SourceFolder: '$(Build.SourcesDirectory)' + Contents: | + **/*.log + **/*.binlog + artifacts/sb/prebuilt-report/** + TargetFolder: '$(Build.StagingDirectory)/BuildLogs' + CleanTargetFolder: true + continueOnError: true + condition: succeededOrFailed() + +- template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish BuildLogs + targetPath: '$(Build.StagingDirectory)/BuildLogs' + artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) + continueOnError: true + condition: succeededOrFailed() + +# Manually inject component detection so that we can ignore the source build upstream cache, which contains +# a nupkg cache of input packages (a local feed). 
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' +# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets +- task: ComponentGovernanceComponentDetection@0 + displayName: Component Detection (Exclude upstream cache) + inputs: + ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache' diff --git a/eng/common/core-templates/variables/pool-providers.yml b/eng/common/core-templates/variables/pool-providers.yml new file mode 100644 index 0000000000000..41053d382a2e1 --- /dev/null +++ b/eng/common/core-templates/variables/pool-providers.yml @@ -0,0 +1,8 @@ +parameters: + is1ESPipeline: false + +variables: + - ${{ if eq(parameters.is1ESPipeline, 'true') }}: + - template: /eng/common/templates-official/variables/pool-providers.yml + - ${{ else }}: + - template: /eng/common/templates/variables/pool-providers.yml \ No newline at end of file diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 9caf9b021dbdd..a8e35df7cee14 100755 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -8,7 +8,7 @@ usage() echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86" echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" echo " for alpine can be specified with version: alpineX.YY or alpineedge" - echo " for FreeBSD can be: freebsd12, freebsd13" + echo " for FreeBSD can be: freebsd13, freebsd14" echo " for illumos can be: illumos" echo " for Haiku can be: haiku." echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD" @@ -71,9 +71,9 @@ __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" -__FreeBSDBase="12.4-RELEASE" +__FreeBSDBase="13.2-RELEASE" __FreeBSDPkg="1.17.0" -__FreeBSDABI="12" +__FreeBSDABI="13" __FreeBSDPackages="libunwind" __FreeBSDPackages+=" icu" __FreeBSDPackages+=" libinotify" @@ -142,7 +142,6 @@ while :; do case $lowerI in -\?|-h|--help) usage - exit 1 ;; arm) __BuildArch=arm @@ -182,12 +181,12 @@ while :; do __AlpinePackages="${__AlpinePackages// lldb-dev/}" __QEMUArch=riscv64 __UbuntuArch=riscv64 - __UbuntuRepo="http://deb.debian.org/debian-ports" + __UbuntuRepo="http://deb.debian.org/debian" __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" unset __LLDB_Package - if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then - __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring" + if [[ -e "/usr/share/keyrings/debian-archive-keyring.gpg" ]]; then + __Keyring="--keyring /usr/share/keyrings/debian-archive-keyring.gpg --include=debian-archive-keyring" fi ;; ppc64le) @@ -229,12 +228,19 @@ while :; do __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" ;; lldb*) - version="${lowerI/lldb/}" - parts=(${version//./ }) + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + if [ -z "$minorVersion" ]; then + minorVersion=0 + fi # for versions > 6.0, lldb has dropped the minor version - if [[ "${parts[0]}" -gt 6 ]]; then - version="${parts[0]}" + if [ "$majorVersion" -le 6 ]; then + version="$majorVersion.$minorVersion" + else + version="$majorVersion" fi __LLDB_Package="liblldb-${version}-dev" @@ -243,15 +249,19 @@ while :; do unset __LLDB_Package ;; llvm*) - 
version="${lowerI/llvm/}" - parts=(${version//./ }) - __LLVM_MajorVersion="${parts[0]}" - __LLVM_MinorVersion="${parts[1]}" - - # for versions > 6.0, llvm has dropped the minor version - if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then - __LLVM_MinorVersion=0; + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + __LLVM_MajorVersion="${version%%.*}" + + [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}" + if [ -z "$__LLVM_MinorVersion" ]; then + __LLVM_MinorVersion=0 + fi + + # for versions > 6.0, lldb has dropped the minor version + if [ "$__LLVM_MajorVersion" -gt 6 ]; then + __LLVM_MinorVersion= fi + ;; xenial) # Ubuntu 16.04 if [[ "$__CodeName" != "jessie" ]]; then @@ -323,25 +333,24 @@ while :; do alpine*) __CodeName=alpine __UbuntuRepo= - version="${lowerI/alpine/}" - if [[ "$version" == "edge" ]]; then + if [[ "$lowerI" == "alpineedge" ]]; then __AlpineVersion=edge else - parts=(${version//./ }) - __AlpineMajorVersion="${parts[0]}" - __AlpineMinoVersion="${parts[1]}" - __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinoVersion" + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + __AlpineMajorVersion="${version%%.*}" + __AlpineMinorVersion="${version#*.}" + __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion" fi ;; - freebsd12) + freebsd13) __CodeName=freebsd __SkipUnmount=1 ;; - freebsd13) + freebsd14) __CodeName=freebsd - __FreeBSDBase="13.2-RELEASE" - __FreeBSDABI="13" + __FreeBSDBase="14.0-RELEASE" + __FreeBSDABI="14" __SkipUnmount=1 ;; illumos) @@ -442,13 +451,39 @@ fi mkdir -p "$__RootfsDir" __RootfsDir="$( cd "$__RootfsDir" && pwd )" +__hasWget= +ensureDownloadTool() +{ + if command -v wget &> /dev/null; then + __hasWget=1 + elif command -v curl &> /dev/null; then + __hasWget=0 + else + >&2 echo "ERROR: either wget or curl is required by this script." + exit 1 + fi +} + if [[ "$__CodeName" == "alpine" ]]; then __ApkToolsVersion=2.12.11 - __ApkToolsSHA512SUM=53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33 __ApkToolsDir="$(mktemp -d)" __ApkKeysDir="$(mktemp -d)" + arch="$(uname -m)" - wget "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic//v$__ApkToolsVersion/x86_64/apk.static" -P "$__ApkToolsDir" + ensureDownloadTool + + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" + else + curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" + fi + if [[ "$arch" == "x86_64" ]]; then + __ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33" + elif [[ "$arch" == "aarch64" ]]; then + __ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0" + else + echo "WARNING: add missing hash for your host architecture. 
To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'" + fi echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c chmod +x "$__ApkToolsDir/apk.static" @@ -477,20 +512,23 @@ if [[ "$__CodeName" == "alpine" ]]; then fi # initialize DB + # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then + # shellcheck disable=SC2086 __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ - search 'llvm*-libs' | sort | tail -1 | sed 's/-[^-]*//2g')" + search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')" fi # install all packages in one go + # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ @@ -501,12 +539,23 @@ if [[ "$__CodeName" == "alpine" ]]; then elif [[ "$__CodeName" == "freebsd" ]]; then mkdir -p "$__RootfsDir"/usr/local/etc JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} - wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + + ensureDownloadTool + + if [[ "$__hasWget" == 1 ]]; then + wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + else + curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + fi echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf mkdir -p "$__RootfsDir"/tmp # get and build package manager - wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + else + curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + fi cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" # needed for install to succeed mkdir -p "$__RootfsDir"/host/etc @@ -514,20 +563,36 @@ elif [[ "$__CodeName" == "freebsd" ]]; then rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" # install packages we need. 
INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update + # shellcheck disable=SC2086 INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages elif [[ "$__CodeName" == "illumos" ]]; then mkdir "$__RootfsDir/tmp" pushd "$__RootfsDir/tmp" JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + ensureDownloadTool + echo "Downloading sysroot." - wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + else + curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + fi echo "Building binutils. Please wait.." - wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - + else + curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf - + fi mkdir build-binutils && cd build-binutils ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" make -j "$JOBS" && make install && cd .. echo "Building gcc. Please wait.." - wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - + else + curl -SL https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf - + fi CFLAGS="-fPIC" CXXFLAGS="-fPIC" CXXFLAGS_FOR_TARGET="-fPIC" @@ -544,7 +609,11 @@ elif [[ "$__CodeName" == "illumos" ]]; then fi BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All" echo "Downloading manifest" - wget "$BaseUrl" + if [[ "$__hasWget" == 1 ]]; then + wget "$BaseUrl" + else + curl -SLO "$BaseUrl" + fi echo "Downloading dependencies." 
read -ra array <<<"$__IllumosPackages" for package in "${array[@]}"; do @@ -552,7 +621,11 @@ elif [[ "$__CodeName" == "illumos" ]]; then # find last occurrence of package in listing and extract its name package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)" echo "Resolved name '$package'" - wget "$BaseUrl"/"$package".tgz + if [[ "$__hasWget" == 1 ]]; then + wget "$BaseUrl"/"$package".tgz + else + curl -SLO "$BaseUrl"/"$package".tgz + fi ar -x "$package".tgz tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null done @@ -561,10 +634,17 @@ elif [[ "$__CodeName" == "illumos" ]]; then rm -rf "$__RootfsDir"/{tmp,+*} mkdir -p "$__RootfsDir"/usr/include/net mkdir -p "$__RootfsDir"/usr/include/netpacket - wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h - wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h - wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h - wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + else + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + fi elif [[ "$__CodeName" == "haiku" ]]; then JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} @@ -574,9 +654,16 @@ elif [[ "$__CodeName" == "haiku" ]]; then mkdir "$__RootfsDir/tmp/download" + ensureDownloadTool + echo "Downloading Haiku package tool" - git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 $__RootfsDir/tmp/script - wget -O "$__RootfsDir/tmp/download/hosttools.zip" $($__RootfsDir/tmp/script/fetch.sh --hosttools) + git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script" + if [[ "$__hasWget" == 1 ]]; then + wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" + else + curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" + fi + unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" 
DepotBaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg" @@ -589,14 +676,25 @@ elif [[ "$__CodeName" == "haiku" ]]; then echo "Downloading $package..." # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60 # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598 - hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ - --header='Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" - wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + if [[ "$__hasWget" == 1 ]]; then + hpkgDownloadUrl="$(wget -qO- --post-data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ + --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" + wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + else + hpkgDownloadUrl="$(curl -sSL -XPOST --data '{"name":"'"$package"'","repositorySourceCode":"haikuports_'$__HaikuArch'","versionType":"LATEST","naturalLanguageCode":"en"}' \ + --header 'Content-Type:application/json' "$DepotBaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')" + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + fi done for package in haiku haiku_devel; do echo "Downloading $package..." - hpkgVersion="$(wget -qO- $HpkgBaseUrl | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" - wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + if [[ "$__hasWget" == 1 ]]; then + hpkgVersion="$(wget -qO- "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + wget -P "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + else + hpkgVersion="$(curl -sSL "$HpkgBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HpkgBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + fi done # Set up the sysroot @@ -609,7 +707,11 @@ elif [[ "$__CodeName" == "haiku" ]]; then # Download buildtools echo "Downloading Haiku buildtools" - wget -O "$__RootfsDir/tmp/download/buildtools.zip" $($__RootfsDir/tmp/script/fetch.sh --buildtools --arch=$__HaikuArch) + if [[ "$__hasWget" == 1 ]]; then + wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" + else + curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" + fi unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" # Cleaning up temporary files @@ -622,10 +724,12 @@ elif [[ -n "$__CodeName" ]]; then __Keyring="$__Keyring --force-check-gpg" fi + # shellcheck disable=SC2086 debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list" chroot "$__RootfsDir" apt-get update chroot "$__RootfsDir" apt-get -f -y install + # shellcheck disable=SC2086 chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages chroot "$__RootfsDir" symlinks -cr /usr chroot "$__RootfsDir" apt-get clean @@ -643,6 +747,5 @@ elif [[ "$__Tizen" == "tizen" ]]; then 
ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch" else echo "Unsupported target platform." - usage; - exit 1 + usage fi diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid index 65f730d224caa..b5f7a7e6e1eb5 100644 --- a/eng/common/cross/riscv64/sources.list.sid +++ b/eng/common/cross/riscv64/sources.list.sid @@ -1 +1 @@ -deb http://deb.debian.org/debian-ports sid main +deb http://deb.debian.org/debian sid main diff --git a/eng/common/cross/riscv64/tizen/tizen.patch b/eng/common/cross/riscv64/tizen/tizen.patch new file mode 100644 index 0000000000000..eb6d1c07470bf --- /dev/null +++ b/eng/common/cross/riscv64/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf64-littleriscv) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) ) diff --git a/eng/common/cross/tizen-build-rootfs.sh b/eng/common/cross/tizen-build-rootfs.sh index ac84173d44fc3..ba31c93285f63 100644 --- a/eng/common/cross/tizen-build-rootfs.sh +++ b/eng/common/cross/tizen-build-rootfs.sh @@ -22,6 +22,10 @@ case "$ARCH" in TIZEN_ARCH="x86_64" LINK_ARCH="x86" ;; + riscv64) + TIZEN_ARCH="riscv64" + LINK_ARCH="riscv" + ;; *) echo "Unsupported architecture for tizen: $ARCH" exit 1 @@ -58,4 +62,21 @@ rm -rf $TIZEN_TMP_DIR echo ">>Start configuring Tizen rootfs" ln -sfn asm-${LINK_ARCH} ./usr/include/asm patch -p1 < $__TIZEN_CROSSDIR/tizen.patch +if [[ "$TIZEN_ARCH" == "riscv64" ]]; then + echo "Fixing broken symlinks in $PWD" + rm ./usr/lib64/libresolv.so + ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so + rm ./usr/lib64/libpthread.so + ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so + rm ./usr/lib64/libdl.so + ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so + rm ./usr/lib64/libutil.so + ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so + rm ./usr/lib64/libm.so + ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so + rm ./usr/lib64/librt.so + ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so + rm ./lib/ld-linux-riscv64-lp64d.so.1 + ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1 +fi echo "<:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) + add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) +endif() + +option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF) +if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC) + add_link_options($<$:-static-libstdc++>) +endif() + +set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.") +if (CLR_CMAKE_CXX_ABI_LIBRARY) + # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library. + string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY}) + # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++. 
+ add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}") +endif() + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj index d7f185856e791..c1323bf412105 100644 --- a/eng/common/helixpublish.proj +++ b/eng/common/helixpublish.proj @@ -1,3 +1,4 @@ + diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props index dbf99d82a5c2e..a735fe9a133ca 100644 --- a/eng/common/internal/Directory.Build.props +++ b/eng/common/internal/Directory.Build.props @@ -1,4 +1,6 @@ + + diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj index 7f5ce6d608133..8fa77e5b181f8 100644 --- a/eng/common/internal/Tools.csproj +++ b/eng/common/internal/Tools.csproj @@ -1,5 +1,6 @@ + net472 false @@ -27,4 +28,5 @@ + diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh index 2d5660642b8d4..ccd3a17268e24 100644 --- a/eng/common/native/init-compiler.sh +++ b/eng/common/native/init-compiler.sh @@ -64,7 +64,7 @@ if [ -z "$CLR_CC" ]; then if [ -z "$majorVersion" ]; then # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero. if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5" - elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi + elif [ "$compiler" = "gcc" ]; then versions="14 13 12 11 10 9 8 7 6 5 4.9"; fi for version in $versions; do _major="${version%%.*}" @@ -125,8 +125,8 @@ if [ -z "$CC" ]; then exit 1 fi -# Only lld version >= 9 can be considered stable. lld doesn't support s390x. -if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && [ "$build_arch" != "s390x" ]; then +# Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0. +if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && ([ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]); then if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then LDFLAGS="-fuse-ld=lld" fi diff --git a/eng/common/native/init-distro-rid.sh b/eng/common/native/init-distro-rid.sh index de1687b2ccbe7..83ea7aab0e081 100644 --- a/eng/common/native/init-distro-rid.sh +++ b/eng/common/native/init-distro-rid.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/sh # getNonPortableDistroRid # @@ -11,21 +11,16 @@ # non-portable rid getNonPortableDistroRid() { - local targetOs="$1" - local targetArch="$2" - local rootfsDir="$3" - local nonPortableRid="" + targetOs="$1" + targetArch="$2" + rootfsDir="$3" + nonPortableRid="" if [ "$targetOs" = "linux" ]; then + # shellcheck disable=SC1091 if [ -e "${rootfsDir}/etc/os-release" ]; then - source "${rootfsDir}/etc/os-release" - - if [[ "${ID}" == "rhel" || "${ID}" == "rocky" || "${ID}" == "alpine" ]]; then - # remove the last version digit - VERSION_ID="${VERSION_ID%.*}" - fi - - if [[ "${VERSION_ID:-}" =~ ^([[:digit:]]|\.)+$ ]]; then + . "${rootfsDir}/etc/os-release" + if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" else # Rolling release distros either do not set VERSION_ID, set it as blank or @@ -33,45 +28,33 @@ getNonPortableDistroRid() # so omit it here to be consistent with everything else. 
nonPortableRid="${ID}-${targetArch}" fi - elif [ -e "${rootfsDir}/android_platform" ]; then - source "$rootfsDir"/android_platform + # shellcheck disable=SC1091 + . "${rootfsDir}/android_platform" nonPortableRid="$RID" fi fi if [ "$targetOs" = "freebsd" ]; then - # $rootfsDir can be empty. freebsd-version is shell script and it should always work. - __freebsd_major_version=$($rootfsDir/bin/freebsd-version | { read v; echo "${v%%.*}"; }) + # $rootfsDir can be empty. freebsd-version is a shell script and should always work. + __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1) nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" - elif command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then __android_sdk_version=$(getprop ro.build.version.sdk) nonPortableRid="android.$__android_sdk_version-${targetArch}" elif [ "$targetOs" = "illumos" ]; then __uname_version=$(uname -v) - case "$__uname_version" in - omnios-*) - __omnios_major_version=$(echo "${__uname_version:8:2}") - nonPortableRid=omnios."$__omnios_major_version"-"$targetArch" - ;; - joyent_*) - __smartos_major_version=$(echo "${__uname_version:7:4}") - nonPortableRid=smartos."$__smartos_major_version"-"$targetArch" - ;; - illumos_*) - nonPortableRid=openindiana-"$targetArch" - ;; - esac + nonPortableRid="illumos-${targetArch}" elif [ "$targetOs" = "solaris" ]; then __uname_version=$(uname -v) - __solaris_major_version=$(echo "${__uname_version%.*}") - nonPortableRid=solaris."$__solaris_major_version"-"$targetArch" + __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1) + nonPortableRid="solaris.$__solaris_major_version-${targetArch}" elif [ "$targetOs" = "haiku" ]; then - __uname_release=$(uname -r) + __uname_release="$(uname -r)" nonPortableRid=haiku.r"$__uname_release"-"$targetArch" fi - echo "$(echo $nonPortableRid | tr '[:upper:]' '[:lower:]')" + echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]' } # initDistroRidGlobal @@ -85,26 +68,23 @@ getNonPortableDistroRid() # None # # Notes: -# -# It is important to note that the function does not return anything, but it -# exports the following variables on success: -# -# __DistroRid : Non-portable rid of the target platform. -# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. -# +# It is important to note that the function does not return anything, but it +# exports the following variables on success: +# __DistroRid : Non-portable rid of the target platform. +# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. initDistroRidGlobal() { - local targetOs="$1" - local targetArch="$2" - local rootfsDir="" - if [ "$#" -ge 3 ]; then + targetOs="$1" + targetArch="$2" + rootfsDir="" + if [ $# -ge 3 ]; then rootfsDir="$3" fi if [ -n "${rootfsDir}" ]; then # We may have a cross build. Check for the existence of the rootfsDir if [ ! -e "${rootfsDir}" ]; then - echo "Error rootfsDir has been passed, but the location is not valid." + echo "Error: rootfsDir has been passed, but the location is not valid." exit 1 fi fi @@ -119,7 +99,7 @@ initDistroRidGlobal() STRINGS="$(command -v llvm-strings || true)" fi - # Check for musl-based distros (e.g Alpine Linux, Void Linux). + # Check for musl-based distros (e.g. Alpine Linux, Void Linux). 
if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then __PortableTargetOS="linux-musl" diff --git a/eng/common/native/init-os-and-arch.sh b/eng/common/native/init-os-and-arch.sh index e693617a6c2b6..38921d4338f74 100644 --- a/eng/common/native/init-os-and-arch.sh +++ b/eng/common/native/init-os-and-arch.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/sh # Use uname to determine what the OS is. OSName=$(uname -s | tr '[:upper:]' '[:lower:]') @@ -35,6 +35,10 @@ fi case "$CPUName" in arm64|aarch64) arch=arm64 + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + arch=arm + fi ;; loongarch64) @@ -50,6 +54,7 @@ case "$CPUName" in ;; armv7l|armv8l) + # shellcheck disable=SC1091 if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then arch=armel else diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1 index 63f3464c986a7..1728f035a93ed 100644 --- a/eng/common/post-build/check-channel-consistency.ps1 +++ b/eng/common/post-build/check-channel-consistency.ps1 @@ -7,7 +7,7 @@ try { . $PSScriptRoot\post-build-utils.ps1 if ($PromoteToChannels -eq "") { - Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info." + Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info." ExitWithExitCode 0 } diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1 new file mode 100644 index 0000000000000..82d91f6fd0226 --- /dev/null +++ b/eng/common/post-build/redact-logs.ps1 @@ -0,0 +1,81 @@ +[CmdletBinding(PositionalBinding=$False)] +param( + [Parameter(Mandatory=$true, Position=0)][string] $InputPath, + [Parameter(Mandatory=$true)][string] $BinlogToolVersion, + [Parameter(Mandatory=$false)][string] $DotnetPath, + [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json', + # File with strings to redact - separated by newlines. + # For comments start the line with '# ' - such lines are ignored + [Parameter(Mandatory=$false)][string] $TokensFilePath, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact +) + +try { + . 
$PSScriptRoot\post-build-utils.ps1 + + $packageName = 'binlogtool' + + $dotnet = $DotnetPath + + if (!$dotnet) { + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + } + + $toolList = & "$dotnet" tool list -g + + if ($toolList -like "*$packageName*") { + & "$dotnet" tool uninstall $packageName -g + } + + $toolPath = "$PSScriptRoot\..\..\..\.tools" + $verbosity = 'minimal' + + New-Item -ItemType Directory -Force -Path $toolPath + + Push-Location -Path $toolPath + + try { + Write-Host "Installing Binlog redactor CLI..." + Write-Host "'$dotnet' new tool-manifest" + & "$dotnet" new tool-manifest + Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion" + & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion + + if (Test-Path $TokensFilePath) { + Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath + $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " } + } + + $optionalParams = [System.Collections.ArrayList]::new() + + Foreach ($p in $TokensToRedact) + { + if($p -match '^\$\(.*\)$') + { + Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p) + } + elseif($p) + { + $optionalParams.Add("-p:" + $p) | Out-Null + } + } + + & $dotnet binlogtool redact --input:$InputPath --recurse --in-place ` + @optionalParams + + if ($LastExitCode -ne 0) { + Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now." + } + } + finally { + Pop-Location + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. Error: $_" + ExitWithExitCode 1 +} diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index 73828dd30d317..aab40de3fd9ac 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -64,7 +64,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.8.1-2" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.10.0-pre.4.0" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true diff --git a/eng/common/sdl/trim-assets-version.ps1 b/eng/common/sdl/trim-assets-version.ps1 index a2e0048770452..0daa2a9e94628 100644 --- a/eng/common/sdl/trim-assets-version.ps1 +++ b/eng/common/sdl/trim-assets-version.ps1 @@ -72,4 +72,4 @@ catch { Write-Host $_ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ ExitWithExitCode 1 -} \ No newline at end of file +} diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md new file mode 100644 index 0000000000000..c114bc28dcb95 --- /dev/null +++ b/eng/common/template-guidance.md @@ -0,0 +1,137 @@ +# Overview + +Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. 
Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`. + +## How to use + +Basic guidance is: + +- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-graded pipeline should use these templates. + +- All other runs should reference `eng/common/templates`. + +See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples. + +#### The `templateIs1ESManaged` parameter + +The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter. + +- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set. A short illustrative sketch appears below, after the Multiple outputs example. + +## Multiple outputs + +1ES pipeline templates impose a policy where every publish artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, placing publish artifacts under `$(Build.ArtifactStagingDirectory)` and using the 1ES-provided template context reduces the number of security scans for your pipeline. + +Example: +``` yaml +# azure-pipelines.yml +extends: + template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate + parameters: + stages: + - stage: build + jobs: + - template: /eng/common/templates-official/jobs/jobs.yml@self + parameters: + # 1ES makes use of outputs to reduce security task injection overhead + templateContext: + outputs: + - output: pipelineArtifact + displayName: 'Publish logs from source' + continueOnError: true + condition: always() + targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log + artifactName: Logs + jobs: + - job: Windows + steps: + - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt + # copy build outputs to artifact staging directory for publishing + - task: CopyFiles@2 + displayName: Gather build output + inputs: + SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' +``` + +Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
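The following is a minimal, hypothetical sketch of setting `templateIs1ESManaged` explicitly when referencing one of the lower-level templates directly. The template path below is a placeholder, not a real Arcade file, and the exact placement of the parameter may differ per template.

``` yaml
# Hypothetical illustration only: the referenced template path is a placeholder.
steps:
- template: /eng/common/templates/steps/some-steps-template.yml@self
  parameters:
    # Assumed: templates under job/, jobs/, steps, and post-build/ require this
    # value to be stated explicitly rather than inferred.
    templateIs1ESManaged: false
```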
+ +# Development notes + +**Folder / file structure** + +``` text +eng\common\ + [templates || templates-official]\ + job\ + job.yml (shim + artifact publishing logic) + onelocbuild.yml (shim) + publish-build-assets.yml (shim) + source-build.yml (shim) + source-index-stage1.yml (shim) + jobs\ + codeql-build.yml (shim) + jobs.yml (shim) + source-build.yml (shim) + post-build\ + post-build.yml (shim) + trigger-subscription.yml (shim) + common-variables.yml (shim) + setup-maestro-vars.yml (shim) + steps\ + publish-build-artifacts.yml (logic) + publish-pipeline-artifacts.yml (logic) + add-build-to-channel.yml (shim) + component-governance.yml (shim) + generate-sbom.yml (shim) + publish-logs.yml (shim) + retain-build.yml (shim) + send-to-helix.yml (shim) + source-build.yml (shim) + variables\ + pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project + sdl-variables.yml (logic) + core-templates\ + job\ + job.yml (logic) + onelocbuild.yml (logic) + publish-build-assets.yml (logic) + source-build.yml (logic) + source-index-stage1.yml (logic) + jobs\ + codeql-build.yml (logic) + jobs.yml (logic) + source-build.yml (logic) + post-build\ + common-variables.yml (logic) + post-build.yml (logic) + setup-maestro-vars.yml (logic) + trigger-subscription.yml (logic) + steps\ + add-build-to-channel.yml (logic) + component-governance.yml (logic) + generate-sbom.yml (logic) + publish-build-artifacts.yml (redirect) + publish-logs.yml (logic) + publish-pipeline-artifacts.yml (redirect) + retain-build.yml (logic) + send-to-helix.yml (logic) + source-build.yml (logic) + variables\ + pool-providers.yml (redirect) +``` + +In the listing above, a file is designated as "shim", "logic", or "redirect". + +- shim - represents a YAML file which is an intermediate step between pipeline logic and .NET Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value. + +- logic - represents actual base template logic. + +- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`. + +Logic for Arcade's templates lives **primarily** in the `core-templates` folder. The exceptions to the location of the logic files are around artifact publishing, which is handled differently between 1ES pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true` (see the shim sketch at the end of this section). + +Within `templates` and `templates-official`, the templates at the "stages" and "jobs" / "job" levels have been replaced with shims. Templates at the "steps" and "variables" level are typically too granular to be replaced with shims and instead contain logic that applies directly to either scenario. + +Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
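For illustration, a shim entry point has roughly the following shape; this mirrors the `templates-official/job/onelocbuild.yml` shim introduced elsewhere in this change. The `templates` variant is expected to be identical except that it sets `is1ESPipeline: false`.

``` yaml
# eng/common/templates-official/job/onelocbuild.yml (shim)
jobs:
- template: /eng/common/core-templates/job/onelocbuild.yml
  parameters:
    is1ESPipeline: true

    # Forward every caller-supplied parameter on to the core template.
    ${{ each parameter in parameters }}:
      ${{ parameter.key }}: ${{ parameter.value }}
```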
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml index 1f035fee73f4a..4724e9aaa8091 100644 --- a/eng/common/templates-official/job/job.yml +++ b/eng/common/templates-official/job/job.yml @@ -1,264 +1,62 @@ -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. - -parameters: -# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - cancelTimeoutInMinutes: '' - condition: '' - container: '' - continueOnError: false - dependsOn: '' - displayName: '' - pool: '' - steps: [] - strategy: '' - timeoutInMinutes: '' - variables: [] - workspace: '' - templateContext: '' - -# Job base template specific parameters - # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md - artifacts: '' - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishBuildAssets: false - enablePublishTestResults: false - enablePublishUsingPipelines: false - enableBuildRetry: false - disableComponentGovernance: '' - componentGovernanceIgnoreDirectories: '' - mergeTestResults: false - testRunTitle: '' - testResultsFormat: '' - name: '' - preSteps: [] - runAsPublic: false -# Sbom related params - enableSbom: true - PackageVersion: 7.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - jobs: -- job: ${{ parameters.name }} - - ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: - cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.container, '') }}: - container: ${{ parameters.container }} - - ${{ if ne(parameters.continueOnError, '') }}: - continueOnError: ${{ parameters.continueOnError }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - - ${{ if ne(parameters.strategy, '') }}: - strategy: ${{ parameters.strategy }} - - ${{ if ne(parameters.timeoutInMinutes, '') }}: - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - ${{ if ne(parameters.templateContext, '') }}: - templateContext: ${{ parameters.templateContext }} - - variables: - - ${{ if ne(parameters.enableTelemetry, 'false') }}: - - name: DOTNET_CLI_TELEMETRY_PROFILE - value: '$(Build.Repository.Uri)' - - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: - - name: EnableRichCodeNavigation - value: 'true' - # Retry signature validation up to three times, waiting 2 seconds between attempts. 
- # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures - - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY - value: 3,2000 - - ${{ each variable in parameters.variables }}: - # handle name-value variable syntax - # example: - # - name: [key] - # value: [value] - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - # handle variable groups - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} - - # handle template variable syntax - # example: - # - template: path/to/template.yml - # parameters: - # [key]: [value] - - ${{ if ne(variable.template, '') }}: - - template: ${{ variable.template }} - ${{ if ne(variable.parameters, '') }}: - parameters: ${{ variable.parameters }} - - # handle key-value variable syntax. - # example: - # - [key]: [value] - - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: - - ${{ each pair in variable }}: - - name: ${{ pair.key }} - value: ${{ pair.value }} - - # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds - - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: DotNet-HelixApi-Access - - ${{ if ne(parameters.workspace, '') }}: - workspace: ${{ parameters.workspace }} - - steps: - - ${{ if ne(parameters.preSteps, '') }}: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - env: - TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)' - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - - task: NuGetAuthenticate@1 - - - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - buildType: current - artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} - targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} - itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - - - ${{ each step in parameters.steps }}: - - ${{ step }} - - - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: - - task: RichCodeNavIndexer@0 - displayName: RichCodeNav Upload - inputs: - languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} - environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }} - richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin - uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} - continueOnError: true - - - template: /eng/common/templates-official/steps/component-governance.yml - parameters: - ${{ if eq(parameters.disableComponentGovernance, '') }}: - ${{ 
if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: - disableComponentGovernance: false - ${{ else }}: - disableComponentGovernance: true - ${{ else }}: - disableComponentGovernance: ${{ parameters.disableComponentGovernance }} - componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - env: - TeamName: $(_TeamName) - - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - task: CopyFiles@2 - displayName: Gather binaries for publish to artifacts - inputs: - SourceFolder: 'artifacts/bin' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' - - task: CopyFiles@2 - displayName: Gather packages for publish to artifacts - inputs: - SourceFolder: 'artifacts/packages' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish pipeline artifacts - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - continueOnError: true - condition: always() - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: 'artifacts/log' - artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} - displayName: 'Publish logs' - continueOnError: true - condition: always() - - - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} - continueOnError: true - condition: always() - - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - - task: PublishTestResults@2 - displayName: Publish XUnit Test Results - inputs: - testResultsFormat: 'xUnit' - testResultsFiles: '*.xml' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), 
eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: - - task: PublishTestResults@2 - displayName: Publish TRX Test Results - inputs: - testResultsFormat: 'VSTest' - testResultsFiles: '*.trx' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: - - template: /eng/common/templates-official/steps/generate-sbom.yml - parameters: - PackageVersion: ${{ parameters.packageVersion}} - BuildDropPath: ${{ parameters.buildDropPath }} - IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableBuildRetry, 'true') }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' - artifactName: 'BuildConfiguration' - displayName: 'Publish build retry configuration' - continueOnError: true \ No newline at end of file +- template: /eng/common/core-templates/job/job.yml + parameters: + is1ESPipeline: true + + # publish artifacts + # for 1ES managed templates, use the templateContext.output to handle multiple outputs. + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - output: buildArtifacts + displayName: Publish pipeline artifacts + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + condition: always() + continueOnError: true + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - output: pipelineArtifact + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}: + - output: buildArtifacts + displayName: Publish Logs + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + publishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - output: pipelineArtifact + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - output: pipelineArtifact + displayName: Publish SBOM manifest + continueOnError: true + targetPath: $(Build.ArtifactStagingDirectory)/sbom + artifactName: $(ARTIFACT_NAME) + + # add any outputs provided via root yaml + - 
${{ if ne(parameters.templateContext.outputs, '') }}: + - ${{ each output in parameters.templateContext.outputs }}: + - ${{ output }} + + # add any remaining templateContext properties + ${{ each context in parameters.templateContext }}: + ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}: + ${{ context.key }}: ${{ context.value }} + + ${{ each parameter in parameters }}: + ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml index 52b4d05d3f8dd..0f0c514b912df 100644 --- a/eng/common/templates-official/job/onelocbuild.yml +++ b/eng/common/templates-official/job/onelocbuild.yml @@ -1,112 +1,7 @@ -parameters: - # Optional: dependencies of the job - dependsOn: '' - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: '' - - CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex - GithubPat: $(BotAccount-dotnet-bot-repo-PAT) - - SourcesDirectory: $(Build.SourcesDirectory) - CreatePr: true - AutoCompletePr: false - ReusePr: true - UseLfLineEndings: true - UseCheckedInLocProjectJson: false - SkipLocProjectJsonGeneration: false - LanguageSet: VS_Main_Languages - LclSource: lclFilesInRepo - LclPackageId: '' - RepoType: gitHub - GitHubOrg: dotnet - MirrorRepo: '' - MirrorBranch: main - condition: '' - JobNameSuffix: '' - jobs: -- job: OneLocBuild${{ parameters.JobNameSuffix }} - - dependsOn: ${{ parameters.dependsOn }} - - displayName: OneLocBuild${{ parameters.JobNameSuffix }} - - variables: - - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat - - name: _GenerateLocProjectArguments - value: -SourcesDirectory ${{ parameters.SourcesDirectory }} - -LanguageSet "${{ parameters.LanguageSet }}" - -CreateNeutralXlfs - - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: - - name: _GenerateLocProjectArguments - value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - - steps: - - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - - task: Powershell@2 - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 - arguments: $(_GenerateLocProjectArguments) - displayName: Generate LocProject.json - condition: ${{ parameters.condition }} - - - task: OneLocBuild@2 - displayName: OneLocBuild - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - inputs: - locProj: eng/Localize/LocProject.json - outDir: $(Build.ArtifactStagingDirectory) - lclSource: ${{ parameters.LclSource }} - lclPackageId: ${{ parameters.LclPackageId }} - isCreatePrSelected: ${{ parameters.CreatePr }} - isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} - ${{ if eq(parameters.CreatePr, true) }}: - isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - isShouldReusePrSelected: ${{ parameters.ReusePr }} - packageSourceAuth: patAuth - patVariable: ${{ parameters.CeapexPat }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - repoType: ${{ parameters.RepoType }} - gitHubPatVariable: "${{ parameters.GithubPat }}" - ${{ if ne(parameters.MirrorRepo, '') }}: - isMirrorRepoSelected: true - gitHubOrganization: ${{ parameters.GitHubOrg }} - mirrorRepo: ${{ parameters.MirrorRepo }} - mirrorBranch: ${{ parameters.MirrorBranch }} - condition: ${{ parameters.condition }} - - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish Localization Files - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} +- template: /eng/common/core-templates/job/onelocbuild.yml + parameters: + is1ESPipeline: true - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish LocProject.json - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} \ No newline at end of file + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml index 589ac80a18b74..d667a70e8de74 100644 --- a/eng/common/templates-official/job/publish-build-assets.yml +++ b/eng/common/templates-official/job/publish-build-assets.yml @@ -1,155 +1,7 @@ -parameters: - configuration: 'Debug' - - # Optional: condition for the job to run - condition: '' - - # Optional: 'true' if future jobs should run even if this job fails - continueOnError: false - - # Optional: dependencies of the job - dependsOn: '' - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: {} - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
- runAsPublic: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishUsingPipelines: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishAssetsImmediately: false - - artifactsPublishingAdditionalParameters: '' - - signingValidationAdditionalParameters: '' - jobs: -- job: Asset_Registry_Publish - - dependsOn: ${{ parameters.dependsOn }} - timeoutInMinutes: 150 - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - displayName: Publish Assets - ${{ else }}: - displayName: Publish to Build Asset Registry - - variables: - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: Publish-Build-Assets - - group: AzureDevOps-Artifact-Feeds-Pats - - name: runCodesignValidationInjection - value: false - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - - template: /eng/common/templates-official/post-build/common-variables.yml - - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: NetCore1ESPool-Publishing-Internal - image: windows.vs2019.amd64 - os: windows - steps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download artifact - inputs: - artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - checkDownloadedFiles: true - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: NuGetAuthenticate@1 - - - task: PowerShell@2 - displayName: Publish Build Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' - /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} - /p:OfficialBuildId=$(Build.BuildNumber) - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: powershell@2 - displayName: Create ReleaseConfigs Artifact - inputs: - targetType: inline - script: | - New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force - $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" - Add-Content -Path $filePath -Value $(BARBuildId) - Add-Content -Path $filePath -Value "$(DefaultChannels)" - Add-Content -Path $filePath -Value $(IsStableBuild) - - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish ReleaseConfigs Artifact - inputs: - PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - task: powershell@2 - displayName: Check if SymbolPublishingExclusionsFile.txt exists - inputs: - targetType: inline - script: | - $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" - 
if(Test-Path -Path $symbolExclusionfile) - { - Write-Host "SymbolExclusionFile exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" - } - else{ - Write-Host "Symbols Exclusion file does not exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" - } - - - task: 1ES.PublishBuildArtifacts@1 - displayName: Publish SymbolPublishingExclusionsFile Artifact - condition: eq(variables['SymbolExclusionFile'], 'true') - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion 3 - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' +- template: /eng/common/core-templates/job/publish-build-assets.yml + parameters: + is1ESPipeline: true - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - - template: /eng/common/templates-official/steps/publish-logs.yml - parameters: - JobLabel: 'Publish_Artifacts_Logs' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml index f193dfbe23668..1a480034b678e 100644 --- a/eng/common/templates-official/job/source-build.yml +++ b/eng/common/templates-official/job/source-build.yml @@ -1,67 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. The template produces a server job with a - # default ID 'Source_Build_Complete' to put in a dependency list if necessary. - - # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. - jobNamePrefix: 'Source_Build' - - # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for - # managed-only repositories. This is an object with these properties: - # - # name: '' - # The name of the job. This is included in the job ID. - # targetRID: '' - # The name of the target RID to use, instead of the one auto-detected by Arcade. - # nonPortable: false - # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than - # linux-x64), and compiling against distro-provided packages rather than portable ones. - # skipPublishValidation: false - # Disables publishing validation. By default, a check is performed to ensure no packages are - # published by source-build. - # container: '' - # A container to use. Runs in docker. - # pool: {} - # A pool to use. Runs directly on an agent. - # buildScript: '' - # Specifies the build script to invoke to perform the build in the repo. The default - # './build.sh' should work for typical Arcade repositories, but this is customizable for - # difficult situations. 
- # jobProperties: {} - # A list of job properties to inject at the top level, for potential extensibility beyond - # container and pool. - platform: {} - jobs: -- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} - displayName: Source-Build (${{ parameters.platform.name }}) - - ${{ each property in parameters.platform.jobProperties }}: - ${{ property.key }}: ${{ property.value }} - - ${{ if ne(parameters.platform.container, '') }}: - container: ${{ parameters.platform.container }} - - ${{ if eq(parameters.platform.pool, '') }}: - # The default VM host AzDO pool. This should be capable of running Docker containers: almost all - # source-build builds run in Docker, including the default managed platform. - # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - image: 1es-mariner-2 - os: linux - - ${{ if ne(parameters.platform.pool, '') }}: - pool: ${{ parameters.platform.pool }} - - workspace: - clean: all +- template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: true - steps: - - template: /eng/common/templates-official/steps/source-build.yml - parameters: - platform: ${{ parameters.platform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml index f0513aee5b0da..6d5ead316f92b 100644 --- a/eng/common/templates-official/job/source-index-stage1.yml +++ b/eng/common/templates-official/job/source-index-stage1.yml @@ -1,68 +1,7 @@ -parameters: - runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20230228.2 - sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json - sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" - preSteps: [] - binlogPath: artifacts/log/Debug/Build.binlog - condition: '' - dependsOn: '' - pool: '' - jobs: -- job: SourceIndexStage1 - dependsOn: ${{ parameters.dependsOn }} - condition: ${{ parameters.condition }} - variables: - - name: SourceIndexPackageVersion - value: ${{ parameters.sourceIndexPackageVersion }} - - name: SourceIndexPackageSource - value: ${{ parameters.sourceIndexPackageSource }} - - name: BinlogPath - value: ${{ parameters.binlogPath }} - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: source-dot-net stage1 variables - - template: /eng/common/templates-official/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - ${{ if eq(variables['System.TeamProject'], 
'public') }}: - name: $(DncEngPublicBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64.open - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $(DncEngInternalBuildPool) - image: windows.vs2022.amd64 - os: windows - - steps: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - task: UseDotNet@2 - displayName: Use .NET Core SDK 6 - inputs: - packageType: sdk - version: 6.0.x - installationPath: $(Agent.TempDirectory)/dotnet - workingDirectory: $(Agent.TempDirectory) - - - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - displayName: Download Tools - # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. - workingDirectory: $(Agent.TempDirectory) - - - script: ${{ parameters.sourceIndexBuildCommand }} - displayName: Build Repository - - - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output - displayName: Process Binlog into indexable sln +- template: /eng/common/core-templates/job/source-index-stage1.yml + parameters: + is1ESPipeline: true - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) - displayName: Upload stage1 artifacts to source index - env: - BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml index b68d3c2f31990..a726322ecfe01 100644 --- a/eng/common/templates-official/jobs/codeql-build.yml +++ b/eng/common/templates-official/jobs/codeql-build.yml @@ -1,31 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - jobs: -- template: /eng/common/templates-official/jobs/jobs.yml +- template: /eng/common/core-templates/jobs/codeql-build.yml parameters: - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishTestResults: false - enablePublishBuildAssets: false - enablePublishUsingPipelines: false - enableTelemetry: true + is1ESPipeline: true - variables: - - group: Publish-Build-Assets - # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in - # sync with the packages.config file. 
- - name: DefaultGuardianVersion - value: 0.109.0 - - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - jobs: ${{ parameters.jobs }} - + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml index 857a0f8ba43e8..007deddaea0f5 100644 --- a/eng/common/templates-official/jobs/jobs.yml +++ b/eng/common/templates-official/jobs/jobs.yml @@ -1,97 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: Enable publishing using release pipelines - enablePublishUsingPipelines: false - - # Optional: Enable running the source-build jobs to build repo from source - enableSourceBuild: false - - # Optional: Parameters for source-build template. - # See /eng/common/templates-official/jobs/source-build.yml for options - sourceBuildParameters: [] - - graphFileGeneration: - # Optional: Enable generating the graph files at the end of the build - enabled: false - # Optional: Include toolset dependencies in the generated graph files - includeToolset: false - - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - - # Optional: Override automatically derived dependsOn value for "publish build assets" job - publishBuildAssetsDependsOn: '' - - # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. - publishAssetsImmediately: false - - # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) - artifactsPublishingAdditionalParameters: '' - signingValidationAdditionalParameters: '' - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - enableSourceIndex: false - sourceIndexParams: {} - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. 
- jobs: -- ${{ each job in parameters.jobs }}: - - template: ../job/job.yml - parameters: - # pass along parameters - ${{ each parameter in parameters }}: - ${{ if ne(parameter.key, 'jobs') }}: - ${{ parameter.key }}: ${{ parameter.value }} - - # pass along job properties - ${{ each property in job }}: - ${{ if ne(property.key, 'job') }}: - ${{ property.key }}: ${{ property.value }} - - name: ${{ job.job }} - -- ${{ if eq(parameters.enableSourceBuild, true) }}: - - template: /eng/common/templates-official/jobs/source-build.yml - parameters: - allCompletedJobId: Source_Build_Complete - ${{ each parameter in parameters.sourceBuildParameters }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if eq(parameters.enableSourceIndex, 'true') }}: - - template: ../job/source-index-stage1.yml - parameters: - runAsPublic: ${{ parameters.runAsPublic }} - ${{ each parameter in parameters.sourceIndexParams }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - - template: ../job/publish-build-assets.yml - parameters: - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - - ${{ if eq(parameters.enableSourceBuild, true) }}: - - Source_Build_Complete +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: true - runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} - publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml index 08e5db9bb1161..483e7b611f346 100644 --- a/eng/common/templates-official/jobs/source-build.yml +++ b/eng/common/templates-official/jobs/source-build.yml @@ -1,46 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. A job is created for each platform, as - # well as an optional server job that completes when all platform jobs complete. - - # The name of the "join" job for all source-build platforms. If set to empty string, the job is - # not included. Existing repo pipelines can use this job depend on all source-build jobs - # completing without maintaining a separate list of every single job ID: just depend on this one - # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. - allCompletedJobId: '' - - # See /eng/common/templates-official/job/source-build.yml - jobNamePrefix: 'Source_Build' - - # This is the default platform provided by Arcade, intended for use by a managed-only repo. 
- defaultManagedPlatform: - name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' - - # Defines the platforms on which to run build jobs. One job is created for each platform, and the - # object in this array is sent to the job template as 'platform'. If no platforms are specified, - # one job runs on 'defaultManagedPlatform'. - platforms: [] - jobs: +- template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: true -- ${{ if ne(parameters.allCompletedJobId, '') }}: - - job: ${{ parameters.allCompletedJobId }} - displayName: Source-Build Complete - pool: server - dependsOn: - - ${{ each platform in parameters.platforms }}: - - ${{ parameters.jobNamePrefix }}_${{ platform.name }} - - ${{ if eq(length(parameters.platforms), 0) }}: - - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} - -- ${{ each platform in parameters.platforms }}: - - template: /eng/common/templates-official/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ platform }} - -- ${{ if eq(length(parameters.platforms), 0) }}: - - template: /eng/common/templates-official/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ parameters.defaultManagedPlatform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml index c24193acfc981..c32fc49233f8f 100644 --- a/eng/common/templates-official/post-build/common-variables.yml +++ b/eng/common/templates-official/post-build/common-variables.yml @@ -1,22 +1,8 @@ variables: - - group: Publish-Build-Assets +- template: /eng/common/core-templates/post-build/common-variables.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true - # Whether the build is internal or not - - name: IsInternalBuild - value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} - - # Default Maestro++ API Endpoint and API Version - - name: MaestroApiEndPoint - value: "https://maestro-prod.westus2.cloudapp.azure.com" - - name: MaestroApiAccessToken - value: $(MaestroAccessToken) - - name: MaestroApiVersion - value: "2020-02-20" - - - name: SourceLinkCLIVersion - value: 3.0.0 - - name: SymbolToolVersion - value: 1.0.1 - - - name: runCodesignValidationInjection - value: false + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml index da1f40958b450..2364c0fd4a527 100644 --- a/eng/common/templates-official/post-build/post-build.yml +++ b/eng/common/templates-official/post-build/post-build.yml @@ -1,285 +1,8 @@ -parameters: - # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. - # Publishing V1 is no longer supported - # Publishing V2 is no longer supported - # Publishing V3 is the default - - name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? 
- type: number - default: 3 - values: - - 3 - - - name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - - - name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - - - name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - - - name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - - - name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - - - name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - - - name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - - - name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - - # These parameters let the user customize the call to sdk-task.ps1 for publishing - # symbols & general artifacts as well as for signing validation - - name: symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - - - name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - - - name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - - # Which stages should finish execution before post-build stages start - - name: validateDependsOn - type: object - default: - - build - - - name: publishDependsOn - type: object - default: - - Validate - - # Optional: Call asset publishing rather than running in a separate stage - - name: publishAssetsImmediately - type: boolean - default: false - stages: -- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - - stage: Validate - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Validate Build Assets - variables: - - template: common-variables.yml - - template: /eng/common/templates-official/variables/pool-providers.yml - jobs: - - job: - displayName: NuGet Validation - condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - - - job: - displayName: Signing Validation - condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - itemPattern: | - ** - !**/Microsoft.SourceBuild.Intermediate.*.nupkg - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: ../steps/publish-logs.yml - parameters: - StageLabel: 'Validation' - JobLabel: 'Signing' - BinlogToolVersion: $(BinlogToolVersion) - - - job: - displayName: SourceLink Validation - condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - image: 1es-windows-2022 - os: windows - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion $(SourceLinkCLIVersion) - continueOnError: true - -- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - - stage: publish_using_darc - ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - dependsOn: ${{ parameters.publishDependsOn }} - ${{ else }}: - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Publish using Darc - variables: - - template: common-variables.yml - - template: /eng/common/templates-official/variables/pool-providers.yml - jobs: - - job: - displayName: Publish Using Darc - timeoutInMinutes: 120 - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: AzurePipelines-EO - image: 1ESPT-Windows2022 - demands: Cmd - os: windows - # If it's not devdiv, it's dnceng - ${{ else }}: - name: NetCore1ESPool-Publishing-Internal - image: windows.vs2019.amd64 - os: windows - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: NuGetAuthenticate@1 +- template: /eng/common/core-templates/post-build/post-build.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml index 0c87f149a4ad7..024397d878645 100644 --- a/eng/common/templates-official/post-build/setup-maestro-vars.yml +++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml @@ -1,70 +1,8 @@ -parameters: - BARBuildId: '' - PromoteToChannelIds: '' - steps: - - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Release Configs - inputs: - buildType: current - artifactName: ReleaseConfigs - checkDownloadedFiles: true - - - task: PowerShell@2 - name: setReleaseVars - displayName: Set Release Configs Vars - inputs: - targetType: inline - pwsh: true - script: | - try { - if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { - $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt - - $BarId = $Content | Select -Index 0 - $Channels = $Content | Select -Index 1 - $IsStableBuild = $Content | Select -Index 2 - - $AzureDevOpsProject = $Env:System_TeamProject - $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId - $AzureDevOpsBuildId = $Env:Build_BuildId - } - else { - $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" - - $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' - $apiHeaders.Add('Accept', 'application/json') - $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") - - $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - - $BarId = $Env:BARBuildId - $Channels = $Env:PromoteToMaestroChannels -split "," - $Channels = $Channels -join "][" - $Channels = "[$Channels]" - - $IsStableBuild = $buildInfo.stable - $AzureDevOpsProject = $buildInfo.azureDevOpsProject - $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId - $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId - } - - Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" - Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" - Write-Host 
"##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" +- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true - Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" - Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" - Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" - } - catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - exit 1 - } - env: - MAESTRO_API_TOKEN: $(MaestroApiAccessToken) - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml index f67a210d62f3e..543dea8c6969a 100644 --- a/eng/common/templates-official/steps/add-build-to-channel.yml +++ b/eng/common/templates-official/steps/add-build-to-channel.yml @@ -1,13 +1,7 @@ -parameters: - ChannelId: 0 - steps: -- task: PowerShell@2 - displayName: Add Build to Channel - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 - arguments: -BuildId $(BARBuildId) - -ChannelId ${{ parameters.ChannelId }} - -MaestroApiAccessToken $(MaestroApiAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) +- template: /eng/common/core-templates/steps/add-build-to-channel.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/build-reason.yml b/eng/common/templates-official/steps/build-reason.yml deleted file mode 100644 index eba58109b52c9..0000000000000 --- a/eng/common/templates-official/steps/build-reason.yml +++ /dev/null @@ -1,12 +0,0 @@ -# build-reason.yml -# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons -# to include steps (',' separated). 
-parameters: - conditions: '' - steps: [] - -steps: - - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: - - ${{ parameters.steps }} - - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml index cbba0596709da..30bb3985ca2bf 100644 --- a/eng/common/templates-official/steps/component-governance.yml +++ b/eng/common/templates-official/steps/component-governance.yml @@ -1,13 +1,7 @@ -parameters: - disableComponentGovernance: false - componentGovernanceIgnoreDirectories: '' - steps: -- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: - - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" - displayName: Set skipComponentGovernanceDetection variable -- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: - - task: ComponentGovernanceComponentDetection@0 - continueOnError: true - inputs: - ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/execute-codeql.yml b/eng/common/templates-official/steps/execute-codeql.yml deleted file mode 100644 index 9b4a5ffa30a78..0000000000000 --- a/eng/common/templates-official/steps/execute-codeql.yml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - # Language that should be analyzed. Defaults to csharp - language: csharp - # Build Commands - buildCommands: '' - overrideParameters: '' # Optional: to override values for parameters. - additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth - # diagnosis of problems with specific tool configurations. - publishGuardianDirectoryToPipeline: false - # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL - # parameters rather than relying on YAML. It may be better to use a local script, because you can - # reproduce results locally without piecing together a command based on the YAML. - executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' - # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named - # 'continueOnError', the parameter value is not correctly picked up. 
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter - # optional: determines whether to continue the build if the step errors; - sdlContinueOnError: false - -steps: -- template: /eng/common/templates-official/steps/execute-sdl.yml - parameters: - overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} - executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} - overrideParameters: ${{ parameters.overrideParameters }} - additionalParameters: '${{ parameters.additionalParameters }} - -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' - publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} - sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/execute-sdl.yml b/eng/common/templates-official/steps/execute-sdl.yml deleted file mode 100644 index 07426fde05d82..0000000000000 --- a/eng/common/templates-official/steps/execute-sdl.yml +++ /dev/null @@ -1,88 +0,0 @@ -parameters: - overrideGuardianVersion: '' - executeAllSdlToolsScript: '' - overrideParameters: '' - additionalParameters: '' - publishGuardianDirectoryToPipeline: false - sdlContinueOnError: false - condition: '' - -steps: -- task: NuGetAuthenticate@1 - inputs: - nuGetServiceConnections: GuardianConnect - -- task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - -- ${{ if ne(parameters.overrideGuardianVersion, '') }}: - - pwsh: | - Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl - . .\sdl.ps1 - $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} - Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" - displayName: Install Guardian (Overridden) - -- ${{ if eq(parameters.overrideGuardianVersion, '') }}: - - pwsh: | - Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl - . .\sdl.ps1 - $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts - Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" - displayName: Install Guardian - -- ${{ if ne(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} - displayName: Execute SDL (Overridden) - continueOnError: ${{ parameters.sdlContinueOnError }} - condition: ${{ parameters.condition }} - -- ${{ if eq(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} - -GuardianCliLocation $(GuardianCliLocation) - -NugetPackageDirectory $(Build.SourcesDirectory)\.packages - -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) - ${{ parameters.additionalParameters }} - displayName: Execute SDL - continueOnError: ${{ parameters.sdlContinueOnError }} - condition: ${{ parameters.condition }} - -- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: - # We want to publish the Guardian results and configuration for easy diagnosis. However, the - # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default - # tooling files. Some of these files are large and aren't useful during an investigation, so - # exclude them by simply deleting them before publishing. (As of writing, there is no documented - # way to selectively exclude a dir from the pipeline artifact publish task.) 
- - task: DeleteFiles@1 - displayName: Delete Guardian dependencies to avoid uploading - inputs: - SourceFolder: $(Agent.BuildDirectory)/.gdn - Contents: | - c - i - condition: succeededOrFailed() - - - publish: $(Agent.BuildDirectory)/.gdn - artifact: GuardianConfiguration - displayName: Publish GuardianConfiguration - condition: succeededOrFailed() - - # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration - # with the "SARIF SAST Scans Tab" Azure DevOps extension - - task: CopyFiles@2 - displayName: Copy SARIF files - inputs: - flattenFolders: true - sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ - contents: '**/*.sarif' - targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs - condition: succeededOrFailed() - - # Use PublishBuildArtifacts because the SARIF extension only checks this case - # see microsoft/sarif-azuredevops-extension#4 - - task: PublishBuildArtifacts@1 - displayName: Publish SARIF files to CodeAnalysisLogs container - inputs: - pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs - artifactName: CodeAnalysisLogs - condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml index 1bf43bf807af3..9a89a4706d94e 100644 --- a/eng/common/templates-official/steps/generate-sbom.yml +++ b/eng/common/templates-official/steps/generate-sbom.yml @@ -1,48 +1,7 @@ -# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. -# PackageName - The name of the package this SBOM represents. -# PackageVersion - The version of the package this SBOM represents. -# ManifestDirPath - The path of the directory where the generated manifest files will be placed -# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
- -parameters: - PackageVersion: 8.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - PackageName: '.NET' - ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom - IgnoreDirectories: '' - sbomContinueOnError: true - steps: -- task: PowerShell@2 - displayName: Prep for SBOM generation in (Non-linux) - condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) - inputs: - filePath: ./eng/common/generate-sbom-prep.ps1 - arguments: ${{parameters.manifestDirPath}} - -# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 -- script: | - chmod +x ./eng/common/generate-sbom-prep.sh - ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} - displayName: Prep for SBOM generation in (Linux) - condition: eq(variables['Agent.Os'], 'Linux') - continueOnError: ${{ parameters.sbomContinueOnError }} - -- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 - displayName: 'Generate SBOM manifest' - continueOnError: ${{ parameters.sbomContinueOnError }} - inputs: - PackageName: ${{ parameters.packageName }} - BuildDropPath: ${{ parameters.buildDropPath }} - PackageVersion: ${{ parameters.packageVersion }} - ManifestDirPath: ${{ parameters.manifestDirPath }} - ${{ if ne(parameters.IgnoreDirectories, '') }}: - AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' - -- task: 1ES.PublishPipelineArtifact@1 - displayName: Publish SBOM manifest - continueOnError: ${{parameters.sbomContinueOnError}} - inputs: - targetPath: '${{parameters.manifestDirPath}}' - artifactName: $(ARTIFACT_NAME) +- template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: true + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/publish-build-artifacts.yml b/eng/common/templates-official/steps/publish-build-artifacts.yml new file mode 100644 index 0000000000000..100a3fc98493c --- /dev/null +++ b/eng/common/templates-official/steps/publish-build-artifacts.yml @@ -0,0 +1,41 @@ +parameters: +- name: displayName + type: string + default: 'Publish to Build Artifact' + +- name: condition + type: string + default: succeeded() + +- name: artifactName + type: string + +- name: pathToPublish + type: string + +- name: continueOnError + type: boolean + default: false + +- name: publishLocation + type: string + default: 'Container' + +- name: is1ESPipeline + type: boolean + default: true + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error +- task: 1ES.PublishBuildArtifacts@1 + displayName: ${{ parameters.displayName }} + condition: ${{ parameters.condition }} + ${{ if parameters.continueOnError }}: + continueOnError: ${{ parameters.continueOnError }} + inputs: + PublishLocation: ${{ parameters.publishLocation }} + PathtoPublish: ${{ parameters.pathToPublish }} + ${{ if parameters.artifactName }}: + ArtifactName: ${{ parameters.artifactName }} + diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml index 04012fed182a1..579fd531e94c3 100644 --- a/eng/common/templates-official/steps/publish-logs.yml +++ b/eng/common/templates-official/steps/publish-logs.yml @@ -1,23 +1,7 @@ -parameters: - StageLabel: '' - JobLabel: '' - steps: -- task: Powershell@2 - displayName: Prepare Binlogs to Upload - inputs: - targetType: inline - 
script: | - New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - continueOnError: true - condition: always() +- template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: true -- task: 1ES.PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' - PublishLocation: Container - ArtifactName: PostBuildLogs - continueOnError: true - condition: always() + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml new file mode 100644 index 0000000000000..d71eb0c743986 --- /dev/null +++ b/eng/common/templates-official/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,26 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: true + +- name: args + type: object + default: {} + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error +- task: 1ES.PublishPipelineArtifact@1 + displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} + ${{ if parameters.args.condition }}: + condition: ${{ parameters.args.condition }} + ${{ else }}: + condition: succeeded() + ${{ if parameters.args.continueOnError }}: + continueOnError: ${{ parameters.args.continueOnError }} + inputs: + targetPath: ${{ parameters.args.targetPath }} + ${{ if parameters.args.artifactName }}: + artifactName: ${{ parameters.args.artifactName }} + ${{ if parameters.args.properties }}: + properties: ${{ parameters.args.properties }} \ No newline at end of file diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml index 83d97a26a01ff..5594551508a3c 100644 --- a/eng/common/templates-official/steps/retain-build.yml +++ b/eng/common/templates-official/steps/retain-build.yml @@ -1,28 +1,7 @@ -parameters: - # Optional azure devops PAT with build execute permissions for the build's organization, - # only needed if the build that should be retained ran on a different organization than - # the pipeline where this template is executing from - Token: '' - # Optional BuildId to retain, defaults to the current running build - BuildId: '' - # Azure devops Organization URI for the build in the https://dev.azure.com/ format. - # Defaults to the organization the current pipeline is running on - AzdoOrgUri: '$(System.CollectionUri)' - # Azure devops project for the build. 
Defaults to the project the current pipeline is running on - AzdoProject: '$(System.TeamProject)' - steps: - - task: powershell@2 - inputs: - targetType: 'filePath' - filePath: eng/common/retain-build.ps1 - pwsh: true - arguments: > - -AzdoOrgUri: ${{parameters.AzdoOrgUri}} - -AzdoProject ${{parameters.AzdoProject}} - -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} - -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} - displayName: Enable permanent build retention - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - BUILD_ID: $(Build.BuildId) \ No newline at end of file +- template: /eng/common/core-templates/steps/retain-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml index 3eb7e2d5f840c..6500f21bf845c 100644 --- a/eng/common/templates-official/steps/send-to-helix.yml +++ b/eng/common/templates-official/steps/send-to-helix.yml @@ -1,91 +1,7 @@ -# Please remember to update the documentation if you make changes to these parameters! -parameters: - HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' - HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number - HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues - HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group - HelixConfiguration: '' # optional -- additional property attached to a job - HelixPreCommands: '' # optional -- commands to run before Helix work item execution - HelixPostCommands: '' # optional -- commands to run after Helix work item execution - WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects - WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects - WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects - CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload - XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true - XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects - XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects - XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner - XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects - IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion - DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." - IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set - HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) - Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO - condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() - continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false - steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' - displayName: ${{ parameters.DisplayNamePrefix }} (Windows) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ 
parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog - displayName: ${{ parameters.DisplayNamePrefix }} (Unix) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} +- template: /eng/common/core-templates/steps/send-to-helix.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml index 829f17c34d118..8f92c49e7b06f 100644 --- a/eng/common/templates-official/steps/source-build.yml +++ b/eng/common/templates-official/steps/source-build.yml @@ -1,129 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. - - # This is a 'steps' template, and is intended for advanced scenarios where the existing build - # infra has a careful build methodology that must be followed. For example, a repo - # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline - # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to - # GitHub. Using this steps template leaves room for that infra to be included. 
- - # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml' - # for details. The entire object is described in the 'job' template for simplicity, even though - # the usage of the properties on this object is split between the 'job' and 'steps' templates. - platform: {} - steps: -# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) -- script: | - set -x - df -h - - # If building on the internal project, the artifact feeds variable may be available (usually only if needed) - # In that case, call the feed setup script to add internal feeds corresponding to public ones. - # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. - # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those - # changes. - internalRestoreArgs= - if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then - # Temporarily work around https://github.com/dotnet/arcade/issues/7709 - chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh - $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) - internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' - - # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. - # This only works if there is a username/email configured, which won't be the case in most CI runs. - git config --get user.email - if [ $? -ne 0 ]; then - git config user.email dn-bot@microsoft.com - git config user.name dn-bot - fi - fi - - # If building on the internal project, the internal storage variable may be available (usually only if needed) - # In that case, add variables to allow the download of internal runtimes if the specified versions are not found - # in the default public locations. - internalRuntimeDownloadArgs= - if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' - fi - - buildConfig=Release - # Check if AzDO substitutes in a build config from a variable, and use it if so. 
- if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then - buildConfig='$(_BuildConfig)' - fi - - officialBuildArgs= - if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then - officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' - fi - - targetRidArgs= - if [ '${{ parameters.platform.targetRID }}' != '' ]; then - targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' - fi - - runtimeOsArgs= - if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then - runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' - fi - - baseOsArgs= - if [ '${{ parameters.platform.baseOS }}' != '' ]; then - baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' - fi - - publishArgs= - if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then - publishArgs='--publish' - fi - - assetManifestFileName=SourceBuild_RidSpecific.xml - if [ '${{ parameters.platform.name }}' != '' ]; then - assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml - fi - - ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ - --configuration $buildConfig \ - --restore --build --pack $publishArgs -bl \ - $officialBuildArgs \ - $internalRuntimeDownloadArgs \ - $internalRestoreArgs \ - $targetRidArgs \ - $runtimeOsArgs \ - $baseOsArgs \ - /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ - /p:ArcadeBuildFromSource=true \ - /p:AssetManifestFileName=$assetManifestFileName - displayName: Build - -# Upload build logs for diagnosis. -- task: CopyFiles@2 - displayName: Prepare BuildLogs staging directory - inputs: - SourceFolder: '$(Build.SourcesDirectory)' - Contents: | - **/*.log - **/*.binlog - artifacts/source-build/self/prebuilt-report/** - TargetFolder: '$(Build.StagingDirectory)/BuildLogs' - CleanTargetFolder: true - continueOnError: true - condition: succeededOrFailed() - -- task: 1ES.PublishPipelineArtifact@1 - displayName: Publish BuildLogs - inputs: - targetPath: '$(Build.StagingDirectory)/BuildLogs' - artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) - continueOnError: true - condition: succeededOrFailed() +- template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: true -# Manually inject component detection so that we can ignore the source build upstream cache, which contains -# a nupkg cache of input packages (a local feed). -# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' -# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets -- task: ComponentGovernanceComponentDetection@0 - displayName: Component Detection (Exclude upstream cache) - inputs: - ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml deleted file mode 100644 index 7870f93bc1765..0000000000000 --- a/eng/common/templates/job/execute-sdl.yml +++ /dev/null @@ -1,139 +0,0 @@ -parameters: - enable: 'false' # Whether the SDL validation job should execute or not - overrideParameters: '' # Optional: to override values for parameters. 
- additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth - # diagnosis of problems with specific tool configurations. - publishGuardianDirectoryToPipeline: false - # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL - # parameters rather than relying on YAML. It may be better to use a local script, because you can - # reproduce results locally without piecing together a command based on the YAML. - executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' - # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named - # 'continueOnError', the parameter value is not correctly picked up. - # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter - sdlContinueOnError: false # optional: determines whether to continue the build if the step errors; - # optional: determines if build artifacts should be downloaded. - downloadArtifacts: true - # optional: determines if this job should search the directory of downloaded artifacts for - # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks. - extractArchiveArtifacts: false - dependsOn: '' # Optional: dependencies of the job - artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts - # Usage: - # artifactNames: - # - 'BlobArtifacts' - # - 'Artifacts_Windows_NT_Release' - # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts, - # not pipeline artifacts, so doesn't affect the use of this parameter. - pipelineArtifactNames: [] - -jobs: -- job: Run_SDL - dependsOn: ${{ parameters.dependsOn }} - displayName: Run SDL tool - condition: and(succeededOrFailed(), eq( ${{ parameters.enable }}, 'true')) - variables: - - group: DotNet-VSTS-Bot - - name: AzDOProjectName - value: ${{ parameters.AzDOProjectName }} - - name: AzDOPipelineId - value: ${{ parameters.AzDOPipelineId }} - - name: AzDOBuildId - value: ${{ parameters.AzDOBuildId }} - - template: /eng/common/templates/variables/sdl-variables.yml - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - template: /eng/common/templates/variables/pool-providers.yml - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - checkout: self - clean: true - - # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars. 
- - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}: - - template: /eng/common/templates/post-build/setup-maestro-vars.yml - - - ${{ if ne(parameters.downloadArtifacts, 'false')}}: - - ${{ if ne(parameters.artifactNames, '') }}: - - ${{ each artifactName in parameters.artifactNames }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Build Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: ${{ artifactName }} - downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - checkDownloadedFiles: true - - ${{ if eq(parameters.artifactNames, '') }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Build Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - downloadType: specific files - itemPattern: "**" - downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - checkDownloadedFiles: true - - - ${{ each artifactName in parameters.pipelineArtifactNames }}: - - task: DownloadPipelineArtifact@2 - displayName: Download Pipeline Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: ${{ artifactName }} - downloadPath: $(Build.ArtifactStagingDirectory)\artifacts - checkDownloadedFiles: true - - - powershell: eng/common/sdl/trim-assets-version.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts - displayName: Trim the version from the NuGet packages - continueOnError: ${{ parameters.sdlContinueOnError }} - - - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts - displayName: Extract Blob Artifacts - continueOnError: ${{ parameters.sdlContinueOnError }} - - - powershell: eng/common/sdl/extract-artifact-packages.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts - displayName: Extract Package Artifacts - continueOnError: ${{ parameters.sdlContinueOnError }} - - - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}: - - powershell: eng/common/sdl/extract-artifact-archives.ps1 - -InputPath $(Build.ArtifactStagingDirectory)\artifacts - -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts - displayName: Extract Archive Artifacts - continueOnError: ${{ parameters.sdlContinueOnError }} - - - template: /eng/common/templates/steps/execute-sdl.yml - parameters: - overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} - executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} - overrideParameters: ${{ parameters.overrideParameters }} - additionalParameters: ${{ parameters.additionalParameters }} - publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} - sdlContinueOnError: ${{ parameters.sdlContinueOnError }} diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index 8ec5c4f2d9f91..1cf9a6d48127b 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -1,259 +1,61 @@ -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied 
to non-PR cases for internal builds. - -parameters: -# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - cancelTimeoutInMinutes: '' - condition: '' - container: '' - continueOnError: false - dependsOn: '' - displayName: '' - pool: '' - steps: [] - strategy: '' - timeoutInMinutes: '' - variables: [] - workspace: '' - templateContext: '' - -# Job base template specific parameters - # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md - artifacts: '' - enableMicrobuild: false +parameters: enablePublishBuildArtifacts: false - enablePublishBuildAssets: false - enablePublishTestResults: false - enablePublishUsingPipelines: false - enableBuildRetry: false - disableComponentGovernance: '' - componentGovernanceIgnoreDirectories: '' - mergeTestResults: false - testRunTitle: '' - testResultsFormat: '' - name: '' - preSteps: [] - runAsPublic: false -# Sbom related params - enableSbom: true - PackageVersion: 7.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' jobs: -- job: ${{ parameters.name }} - - ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: - cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} - - ${{ if ne(parameters.condition, '') }}: - condition: ${{ parameters.condition }} - - ${{ if ne(parameters.container, '') }}: - container: ${{ parameters.container }} - - ${{ if ne(parameters.continueOnError, '') }}: - continueOnError: ${{ parameters.continueOnError }} - - ${{ if ne(parameters.dependsOn, '') }}: - dependsOn: ${{ parameters.dependsOn }} - - ${{ if ne(parameters.displayName, '') }}: - displayName: ${{ parameters.displayName }} - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - - ${{ if ne(parameters.strategy, '') }}: - strategy: ${{ parameters.strategy }} - - ${{ if ne(parameters.timeoutInMinutes, '') }}: - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - ${{ if ne(parameters.templateContext, '') }}: - templateContext: ${{ parameters.templateContext }} - - variables: - - ${{ if ne(parameters.enableTelemetry, 'false') }}: - - name: DOTNET_CLI_TELEMETRY_PROFILE - value: '$(Build.Repository.Uri)' - - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: - - name: EnableRichCodeNavigation - value: 'true' - # Retry signature validation up to three times, waiting 2 seconds between attempts. - # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures - - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY - value: 3,2000 - - ${{ each variable in parameters.variables }}: - # handle name-value variable syntax - # example: - # - name: [key] - # value: [value] - - ${{ if ne(variable.name, '') }}: - - name: ${{ variable.name }} - value: ${{ variable.value }} - - # handle variable groups - - ${{ if ne(variable.group, '') }}: - - group: ${{ variable.group }} - - # handle template variable syntax - # example: - # - template: path/to/template.yml - # parameters: - # [key]: [value] - - ${{ if ne(variable.template, '') }}: - - template: ${{ variable.template }} - ${{ if ne(variable.parameters, '') }}: - parameters: ${{ variable.parameters }} - - # handle key-value variable syntax. 
- # example: - # - [key]: [value] - - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: - - ${{ each pair in variable }}: - - name: ${{ pair.key }} - value: ${{ pair.value }} - - # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds - - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: DotNet-HelixApi-Access - - ${{ if ne(parameters.workspace, '') }}: - workspace: ${{ parameters.workspace }} - - steps: - - ${{ if ne(parameters.preSteps, '') }}: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - task: MicroBuildSigningPlugin@3 - displayName: Install MicroBuild plugin - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - env: - TeamName: $(_TeamName) - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - - - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: - - task: NuGetAuthenticate@1 - - - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: - - task: DownloadPipelineArtifact@2 - inputs: - buildType: current - artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} - targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} - itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} - - - ${{ each step in parameters.steps }}: - - ${{ step }} - - - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: - - task: RichCodeNavIndexer@0 - displayName: RichCodeNav Upload - inputs: - languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} - environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }} - richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin - uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} - continueOnError: true - - - template: /eng/common/templates/steps/component-governance.yml - parameters: - ${{ if eq(parameters.disableComponentGovernance, '') }}: - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: - disableComponentGovernance: false - ${{ else }}: - disableComponentGovernance: true - ${{ else }}: - disableComponentGovernance: ${{ parameters.disableComponentGovernance }} - componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: 
MicroBuildCleanup@1 - displayName: Execute Microbuild cleanup tasks - condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - env: - TeamName: $(_TeamName) - - - ${{ if ne(parameters.artifacts.publish, '') }}: - - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: - - task: CopyFiles@2 - displayName: Gather binaries for publish to artifacts - inputs: - SourceFolder: 'artifacts/bin' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' - - task: CopyFiles@2 - displayName: Gather packages for publish to artifacts - inputs: - SourceFolder: 'artifacts/packages' - Contents: '**' - TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' - - task: PublishBuildArtifacts@1 - displayName: Publish pipeline artifacts - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} - continueOnError: true - condition: always() - - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: - - publish: artifacts/log - artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} - displayName: Publish logs - continueOnError: true - condition: always() - - - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: - - task: PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)' - PublishLocation: Container - ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} - continueOnError: true - condition: always() - - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: - - task: PublishTestResults@2 - displayName: Publish XUnit Test Results - inputs: - testResultsFormat: 'xUnit' - testResultsFiles: '*.xml' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: - - task: PublishTestResults@2 - displayName: Publish TRX Test Results - inputs: - testResultsFormat: 'VSTest' - testResultsFiles: '*.trx' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' - testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx - mergeTestResults: ${{ parameters.mergeTestResults }} - continueOnError: true - condition: always() - - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: - - template: /eng/common/templates/steps/generate-sbom.yml - parameters: - PackageVersion: ${{ parameters.packageVersion}} - BuildDropPath: ${{ parameters.buildDropPath }} - IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} - - - ${{ if 
eq(parameters.enableBuildRetry, 'true') }}: - - publish: $(Build.SourcesDirectory)\eng\common\BuildConfiguration - artifact: BuildConfiguration - displayName: Publish build retry configuration - continueOnError: true +- template: /eng/common/core-templates/job/job.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}: + ${{ parameter.key }}: ${{ parameter.value }} + + steps: + - ${{ each step in parameters.steps }}: + - ${{ step }} + + artifactPublishSteps: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: false + args: + displayName: Publish pipeline artifacts + pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + publishLocation: Container + artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: false + args: + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: false + args: + displayName: Publish Logs + pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + publishLocation: Container + artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }} + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: false + args: + targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml index 60ab00c4de3ac..ff829dc4c700c 100644 --- a/eng/common/templates/job/onelocbuild.yml +++ b/eng/common/templates/job/onelocbuild.yml @@ -1,109 +1,7 @@ -parameters: - # Optional: dependencies of the job - dependsOn: '' - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: '' - - CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex - GithubPat: $(BotAccount-dotnet-bot-repo-PAT) - - SourcesDirectory: $(Build.SourcesDirectory) - CreatePr: true - AutoCompletePr: false - ReusePr: true - UseLfLineEndings: true - UseCheckedInLocProjectJson: false - SkipLocProjectJsonGeneration: false - LanguageSet: VS_Main_Languages - LclSource: lclFilesInRepo - LclPackageId: '' - RepoType: gitHub - GitHubOrg: dotnet - MirrorRepo: '' - MirrorBranch: main - condition: '' - JobNameSuffix: '' - jobs: -- job: OneLocBuild${{ 
parameters.JobNameSuffix }} - - dependsOn: ${{ parameters.dependsOn }} - - displayName: OneLocBuild${{ parameters.JobNameSuffix }} - - variables: - - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat - - name: _GenerateLocProjectArguments - value: -SourcesDirectory ${{ parameters.SourcesDirectory }} - -LanguageSet "${{ parameters.LanguageSet }}" - -CreateNeutralXlfs - - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: - - name: _GenerateLocProjectArguments - value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson - - template: /eng/common/templates/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - - task: Powershell@2 - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 - arguments: $(_GenerateLocProjectArguments) - displayName: Generate LocProject.json - condition: ${{ parameters.condition }} - - - task: OneLocBuild@2 - displayName: OneLocBuild - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - inputs: - locProj: eng/Localize/LocProject.json - outDir: $(Build.ArtifactStagingDirectory) - lclSource: ${{ parameters.LclSource }} - lclPackageId: ${{ parameters.LclPackageId }} - isCreatePrSelected: ${{ parameters.CreatePr }} - isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} - ${{ if eq(parameters.CreatePr, true) }}: - isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - isShouldReusePrSelected: ${{ parameters.ReusePr }} - packageSourceAuth: patAuth - patVariable: ${{ parameters.CeapexPat }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - repoType: ${{ parameters.RepoType }} - gitHubPatVariable: "${{ parameters.GithubPat }}" - ${{ if ne(parameters.MirrorRepo, '') }}: - isMirrorRepoSelected: true - gitHubOrganization: ${{ parameters.GitHubOrg }} - mirrorRepo: ${{ parameters.MirrorRepo }} - mirrorBranch: ${{ parameters.MirrorBranch }} - condition: ${{ parameters.condition }} - - - task: PublishBuildArtifacts@1 - displayName: Publish Localization Files - inputs: - PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} +- template: /eng/common/core-templates/job/onelocbuild.yml + parameters: + is1ESPipeline: false - - task: PublishBuildArtifacts@1 - displayName: Publish LocProject.json - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/' - PublishLocation: Container - ArtifactName: Loc - condition: ${{ parameters.condition }} \ No newline at end of file + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml index 8ec0151def21a..ab2edec2adb54 100644 --- a/eng/common/templates/job/publish-build-assets.yml +++ b/eng/common/templates/job/publish-build-assets.yml @@ -1,151 +1,7 @@ -parameters: - configuration: 'Debug' - - # Optional: condition 
for the job to run - condition: '' - - # Optional: 'true' if future jobs should run even if this job fails - continueOnError: false - - # Optional: dependencies of the job - dependsOn: '' - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool - pool: {} - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishUsingPipelines: false - - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishAssetsImmediately: false - - artifactsPublishingAdditionalParameters: '' - - signingValidationAdditionalParameters: '' - jobs: -- job: Asset_Registry_Publish - - dependsOn: ${{ parameters.dependsOn }} - timeoutInMinutes: 150 - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - displayName: Publish Assets - ${{ else }}: - displayName: Publish to Build Asset Registry - - variables: - - template: /eng/common/templates/variables/pool-providers.yml - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: Publish-Build-Assets - - group: AzureDevOps-Artifact-Feeds-Pats - - name: runCodesignValidationInjection - value: false - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - - template: /eng/common/templates/post-build/common-variables.yml - - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: - name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download artifact - inputs: - artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - checkDownloadedFiles: true - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: NuGetAuthenticate@1 - - - task: PowerShell@2 - displayName: Publish Build Assets - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' - /p:BuildAssetRegistryToken=$(MaestroAccessToken) - /p:MaestroApiEndpoint=https://maestro.dot.net - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} - /p:OfficialBuildId=$(Build.BuildNumber) - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} - - - task: powershell@2 - displayName: Create ReleaseConfigs Artifact - inputs: - targetType: inline - script: | - Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId) - Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)" - Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild) - - - task: PublishBuildArtifacts@1 - displayName: Publish ReleaseConfigs Artifact - inputs: - PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - task: powershell@2 - displayName: Check if SymbolPublishingExclusionsFile.txt exists - inputs: - targetType: inline - script: | - $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" - if(Test-Path -Path $symbolExclusionfile) - { - Write-Host "SymbolExclusionFile exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true" - } - else{ - Write-Host "Symbols Exclusion file does not exists" - Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false" - } - - - task: PublishBuildArtifacts@1 - displayName: Publish SymbolPublishingExclusionsFile Artifact - condition: eq(variables['SymbolExclusionFile'], 'true') - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt' - PublishLocation: Container - ArtifactName: ReleaseConfigs - - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: - - template: /eng/common/templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion 3 - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ 
parameters.symbolPublishingAdditionalParameters }}' +- template: /eng/common/core-templates/job/publish-build-assets.yml + parameters: + is1ESPipeline: false - - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - - template: /eng/common/templates/steps/publish-logs.yml - parameters: - JobLabel: 'Publish_Artifacts_Logs' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml index 8a3deef2b7274..e44d47b1d760c 100644 --- a/eng/common/templates/job/source-build.yml +++ b/eng/common/templates/job/source-build.yml @@ -1,66 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. The template produces a server job with a - # default ID 'Source_Build_Complete' to put in a dependency list if necessary. - - # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed. - jobNamePrefix: 'Source_Build' - - # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for - # managed-only repositories. This is an object with these properties: - # - # name: '' - # The name of the job. This is included in the job ID. - # targetRID: '' - # The name of the target RID to use, instead of the one auto-detected by Arcade. - # nonPortable: false - # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than - # linux-x64), and compiling against distro-provided packages rather than portable ones. - # skipPublishValidation: false - # Disables publishing validation. By default, a check is performed to ensure no packages are - # published by source-build. - # container: '' - # A container to use. Runs in docker. - # pool: {} - # A pool to use. Runs directly on an agent. - # buildScript: '' - # Specifies the build script to invoke to perform the build in the repo. The default - # './build.sh' should work for typical Arcade repositories, but this is customizable for - # difficult situations. - # jobProperties: {} - # A list of job properties to inject at the top level, for potential extensibility beyond - # container and pool. - platform: {} - jobs: -- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }} - displayName: Source-Build (${{ parameters.platform.name }}) - - ${{ each property in parameters.platform.jobProperties }}: - ${{ property.key }}: ${{ property.value }} - - ${{ if ne(parameters.platform.container, '') }}: - container: ${{ parameters.platform.container }} - - ${{ if eq(parameters.platform.pool, '') }}: - # The default VM host AzDO pool. This should be capable of running Docker containers: almost all - # source-build builds run in Docker, including the default managed platform. 
- # /eng/common/templates/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - demands: ImageOverride -equals Build.Ubuntu.1804.Amd64 - - ${{ if ne(parameters.platform.pool, '') }}: - pool: ${{ parameters.platform.pool }} - - workspace: - clean: all +- template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: false - steps: - - template: /eng/common/templates/steps/source-build.yml - parameters: - platform: ${{ parameters.platform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml index b98202aa02d82..89f3291593cb7 100644 --- a/eng/common/templates/job/source-index-stage1.yml +++ b/eng/common/templates/job/source-index-stage1.yml @@ -1,67 +1,7 @@ -parameters: - runAsPublic: false - sourceIndexPackageVersion: 1.0.1-20230228.2 - sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json - sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" - preSteps: [] - binlogPath: artifacts/log/Debug/Build.binlog - condition: '' - dependsOn: '' - pool: '' - jobs: -- job: SourceIndexStage1 - dependsOn: ${{ parameters.dependsOn }} - condition: ${{ parameters.condition }} - variables: - - name: SourceIndexPackageVersion - value: ${{ parameters.sourceIndexPackageVersion }} - - name: SourceIndexPackageSource - value: ${{ parameters.sourceIndexPackageSource }} - - name: BinlogPath - value: ${{ parameters.binlogPath }} - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - group: source-dot-net stage1 variables - - template: /eng/common/templates/variables/pool-providers.yml - - ${{ if ne(parameters.pool, '') }}: - pool: ${{ parameters.pool }} - ${{ if eq(parameters.pool, '') }}: - pool: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - name: $(DncEngPublicBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64.open - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - ${{ each preStep in parameters.preSteps }}: - - ${{ preStep }} - - - task: UseDotNet@2 - displayName: Use .NET Core SDK 6 - inputs: - packageType: sdk - version: 6.0.x - installationPath: $(Agent.TempDirectory)/dotnet - workingDirectory: $(Agent.TempDirectory) - - - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - 
$(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools - displayName: Download Tools - # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. - workingDirectory: $(Agent.TempDirectory) - - - script: ${{ parameters.sourceIndexBuildCommand }} - displayName: Build Repository - - - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output - displayName: Process Binlog into indexable sln +- template: /eng/common/core-templates/job/source-index-stage1.yml + parameters: + is1ESPipeline: false - - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) - displayName: Upload stage1 artifacts to source index - env: - BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url) + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml index f7dc5ea4aaa63..517f24d6a52ce 100644 --- a/eng/common/templates/jobs/codeql-build.yml +++ b/eng/common/templates/jobs/codeql-build.yml @@ -1,31 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - jobs: -- template: /eng/common/templates/jobs/jobs.yml +- template: /eng/common/core-templates/jobs/codeql-build.yml parameters: - enableMicrobuild: false - enablePublishBuildArtifacts: false - enablePublishTestResults: false - enablePublishBuildAssets: false - enablePublishUsingPipelines: false - enableTelemetry: true + is1ESPipeline: false - variables: - - group: Publish-Build-Assets - # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in - # sync with the packages.config file. - - name: DefaultGuardianVersion - value: 0.109.0 - - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config - - name: GuardianVersion - value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} - - jobs: ${{ parameters.jobs }} - + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml index 289bb2396ce83..388e9037b3e60 100644 --- a/eng/common/templates/jobs/jobs.yml +++ b/eng/common/templates/jobs/jobs.yml @@ -1,97 +1,7 @@ -parameters: - # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md - continueOnError: false - - # Optional: Include PublishBuildArtifacts task - enablePublishBuildArtifacts: false - - # Optional: Enable publishing using release pipelines - enablePublishUsingPipelines: false - - # Optional: Enable running the source-build jobs to build repo from source - enableSourceBuild: false - - # Optional: Parameters for source-build template. 
- # See /eng/common/templates/jobs/source-build.yml for options - sourceBuildParameters: [] - - graphFileGeneration: - # Optional: Enable generating the graph files at the end of the build - enabled: false - # Optional: Include toolset dependencies in the generated graph files - includeToolset: false - - # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job - jobs: [] - - # Optional: Override automatically derived dependsOn value for "publish build assets" job - publishBuildAssetsDependsOn: '' - - # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. - publishAssetsImmediately: false - - # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) - artifactsPublishingAdditionalParameters: '' - signingValidationAdditionalParameters: '' - - # Optional: should run as a public build even in the internal project - # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. - runAsPublic: false - - enableSourceIndex: false - sourceIndexParams: {} - -# Internal resources (telemetry, microbuild) can only be accessed from non-public projects, -# and some (Microbuild) should only be applied to non-PR cases for internal builds. - jobs: -- ${{ each job in parameters.jobs }}: - - template: ../job/job.yml - parameters: - # pass along parameters - ${{ each parameter in parameters }}: - ${{ if ne(parameter.key, 'jobs') }}: - ${{ parameter.key }}: ${{ parameter.value }} - - # pass along job properties - ${{ each property in job }}: - ${{ if ne(property.key, 'job') }}: - ${{ property.key }}: ${{ property.value }} - - name: ${{ job.job }} - -- ${{ if eq(parameters.enableSourceBuild, true) }}: - - template: /eng/common/templates/jobs/source-build.yml - parameters: - allCompletedJobId: Source_Build_Complete - ${{ each parameter in parameters.sourceBuildParameters }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if eq(parameters.enableSourceIndex, 'true') }}: - - template: ../job/source-index-stage1.yml - parameters: - runAsPublic: ${{ parameters.runAsPublic }} - ${{ each parameter in parameters.sourceIndexParams }}: - ${{ parameter.key }}: ${{ parameter.value }} - -- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: - - template: ../job/publish-build-assets.yml - parameters: - continueOnError: ${{ parameters.continueOnError }} - dependsOn: - - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.publishBuildAssetsDependsOn }}: - - ${{ job.job }} - - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - - ${{ each job in parameters.jobs }}: - - ${{ job.job }} - - ${{ if eq(parameters.enableSourceBuild, true) }}: - - Source_Build_Complete +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: false - runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} - publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} - enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} - 
artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} - signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml index a15b07eb51d9d..818d4c326dbbf 100644 --- a/eng/common/templates/jobs/source-build.yml +++ b/eng/common/templates/jobs/source-build.yml @@ -1,46 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. A job is created for each platform, as - # well as an optional server job that completes when all platform jobs complete. - - # The name of the "join" job for all source-build platforms. If set to empty string, the job is - # not included. Existing repo pipelines can use this job depend on all source-build jobs - # completing without maintaining a separate list of every single job ID: just depend on this one - # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. - allCompletedJobId: '' - - # See /eng/common/templates/job/source-build.yml - jobNamePrefix: 'Source_Build' - - # This is the default platform provided by Arcade, intended for use by a managed-only repo. - defaultManagedPlatform: - name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8' - - # Defines the platforms on which to run build jobs. One job is created for each platform, and the - # object in this array is sent to the job template as 'platform'. If no platforms are specified, - # one job runs on 'defaultManagedPlatform'. - platforms: [] - jobs: +- template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: false -- ${{ if ne(parameters.allCompletedJobId, '') }}: - - job: ${{ parameters.allCompletedJobId }} - displayName: Source-Build Complete - pool: server - dependsOn: - - ${{ each platform in parameters.platforms }}: - - ${{ parameters.jobNamePrefix }}_${{ platform.name }} - - ${{ if eq(length(parameters.platforms), 0) }}: - - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} - -- ${{ each platform in parameters.platforms }}: - - template: /eng/common/templates/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ platform }} - -- ${{ if eq(length(parameters.platforms), 0) }}: - - template: /eng/common/templates/job/source-build.yml - parameters: - jobNamePrefix: ${{ parameters.jobNamePrefix }} - platform: ${{ parameters.defaultManagedPlatform }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml index 173914f2364a7..7fa105875592c 100644 --- a/eng/common/templates/post-build/common-variables.yml +++ b/eng/common/templates/post-build/common-variables.yml @@ -1,22 +1,8 @@ variables: - - group: Publish-Build-Assets +- template: /eng/common/core-templates/post-build/common-variables.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false - # Whether the build is internal or not - - name: IsInternalBuild - value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} - - # Default Maestro++ API Endpoint and API Version - - name: MaestroApiEndPoint - value: "https://maestro.dot.net" - - name: 
MaestroApiAccessToken - value: $(MaestroAccessToken) - - name: MaestroApiVersion - value: "2020-02-20" - - - name: SourceLinkCLIVersion - value: 3.0.0 - - name: SymbolToolVersion - value: 1.0.1 - - - name: runCodesignValidationInjection - value: false + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml index aba44a25a3387..53ede714bdd20 100644 --- a/eng/common/templates/post-build/post-build.yml +++ b/eng/common/templates/post-build/post-build.yml @@ -1,281 +1,8 @@ -parameters: - # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. - # Publishing V1 is no longer supported - # Publishing V2 is no longer supported - # Publishing V3 is the default - - name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? - type: number - default: 3 - values: - - 3 - - - name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - - - name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - - - name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - - - name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - - - name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - - - name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - - - name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - - - name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - - # These parameters let the user customize the call to sdk-task.ps1 for publishing - # symbols & general artifacts as well as for signing validation - - name: symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - - - name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - - - name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - - # Which stages should finish execution before post-build stages start - - name: validateDependsOn - type: object - default: - - build - - - name: publishDependsOn - type: object - default: - - Validate - - # Optional: Call asset publishing rather than running in a separate stage - - name: publishAssetsImmediately - type: boolean - default: false - stages: -- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - - stage: Validate - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Validate Build Assets - variables: - - template: common-variables.yml - - template: /eng/common/templates/variables/pool-providers.yml - jobs: - - job: - displayName: NuGet Validation - condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - -ToolDestinationPath $(Agent.BuildDirectory)/Extract/ - - - job: - displayName: Signing Validation - condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - itemPattern: | - ** - !**/Microsoft.SourceBuild.Intermediate.*.nupkg - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: ../steps/publish-logs.yml - parameters: - StageLabel: 'Validation' - JobLabel: 'Signing' - - - job: - displayName: SourceLink Validation - condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion $(SourceLinkCLIVersion) - continueOnError: true - - - template: /eng/common/templates/job/execute-sdl.yml - parameters: - enable: ${{ parameters.SDLValidationParameters.enable }} - publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }} - additionalParameters: ${{ parameters.SDLValidationParameters.params }} - continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }} - artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }} - downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }} - -- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - - stage: publish_using_darc - ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: - dependsOn: ${{ parameters.publishDependsOn }} - ${{ else }}: - dependsOn: ${{ parameters.validateDependsOn }} - displayName: Publish using Darc - variables: - - template: common-variables.yml - - template: /eng/common/templates/variables/pool-providers.yml - jobs: - - job: - displayName: Publish Using Darc - timeoutInMinutes: 120 - pool: - # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - name: VSEngSS-MicroBuild2022-1ES - demands: Cmd - # If it's not devdiv, it's dnceng - ${{ else }}: - name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 - steps: - - template: setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - - - task: NuGetAuthenticate@1 +- template: /eng/common/core-templates/post-build/post-build.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false - - task: PowerShell@2 - displayName: Publish Using Darc - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: -BuildId $(BARBuildId) - -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} - -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)' - -MaestroToken '$(MaestroApiAccessToken)' - -WaitPublishingFinish true - -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' - -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml index 0c87f149a4ad7..a79fab5b441e8 100644 --- a/eng/common/templates/post-build/setup-maestro-vars.yml +++ b/eng/common/templates/post-build/setup-maestro-vars.yml @@ -1,70 +1,8 @@ -parameters: - BARBuildId: '' - PromoteToChannelIds: '' - steps: - - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: - - task: DownloadBuildArtifacts@0 - displayName: Download Release Configs - inputs: - buildType: current - artifactName: ReleaseConfigs - checkDownloadedFiles: true - - - task: PowerShell@2 - name: setReleaseVars - displayName: Set Release Configs Vars - inputs: - targetType: inline - pwsh: true - script: | - try { - if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { - $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt - - $BarId = $Content | Select -Index 0 - $Channels = $Content | Select -Index 1 - $IsStableBuild = $Content | Select -Index 2 - - $AzureDevOpsProject = $Env:System_TeamProject - $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId - $AzureDevOpsBuildId = $Env:Build_BuildId - } - else { - $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}" - - $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]' - $apiHeaders.Add('Accept', 'application/json') - $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}") - - $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" } - - $BarId = $Env:BARBuildId - $Channels = $Env:PromoteToMaestroChannels -split "," - $Channels = $Channels -join "][" - $Channels = "[$Channels]" - - $IsStableBuild = $buildInfo.stable - $AzureDevOpsProject = $buildInfo.azureDevOpsProject - $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId - $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId - } - - Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" - Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" - Write-Host "##vso[task.setvariable 
variable=IsStableBuild]$IsStableBuild" +- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false - Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" - Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" - Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" - } - catch { - Write-Host $_ - Write-Host $_.Exception - Write-Host $_.ScriptStackTrace - exit 1 - } - env: - MAESTRO_API_TOKEN: $(MaestroApiAccessToken) - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml index f67a210d62f3e..42bbba161b9b6 100644 --- a/eng/common/templates/steps/add-build-to-channel.yml +++ b/eng/common/templates/steps/add-build-to-channel.yml @@ -1,13 +1,7 @@ -parameters: - ChannelId: 0 - steps: -- task: PowerShell@2 - displayName: Add Build to Channel - inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1 - arguments: -BuildId $(BARBuildId) - -ChannelId ${{ parameters.ChannelId }} - -MaestroApiAccessToken $(MaestroApiAccessToken) - -MaestroApiEndPoint $(MaestroApiEndPoint) - -MaestroApiVersion $(MaestroApiVersion) +- template: /eng/common/core-templates/steps/add-build-to-channel.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml deleted file mode 100644 index eba58109b52c9..0000000000000 --- a/eng/common/templates/steps/build-reason.yml +++ /dev/null @@ -1,12 +0,0 @@ -# build-reason.yml -# Description: runs steps if build.reason condition is valid. conditions is a string of valid build reasons -# to include steps (',' separated). 
-parameters: - conditions: '' - steps: [] - -steps: - - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}: - - ${{ parameters.steps }} - - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml index cbba0596709da..c12a5f8d21d76 100644 --- a/eng/common/templates/steps/component-governance.yml +++ b/eng/common/templates/steps/component-governance.yml @@ -1,13 +1,7 @@ -parameters: - disableComponentGovernance: false - componentGovernanceIgnoreDirectories: '' - steps: -- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: - - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" - displayName: Set skipComponentGovernanceDetection variable -- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: - - task: ComponentGovernanceComponentDetection@0 - continueOnError: true - inputs: - ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} \ No newline at end of file +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml deleted file mode 100644 index 3930b1630214b..0000000000000 --- a/eng/common/templates/steps/execute-codeql.yml +++ /dev/null @@ -1,32 +0,0 @@ -parameters: - # Language that should be analyzed. Defaults to csharp - language: csharp - # Build Commands - buildCommands: '' - overrideParameters: '' # Optional: to override values for parameters. - additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")' - # Optional: if specified, restore and use this version of Guardian instead of the default. - overrideGuardianVersion: '' - # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth - # diagnosis of problems with specific tool configurations. - publishGuardianDirectoryToPipeline: false - # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL - # parameters rather than relying on YAML. It may be better to use a local script, because you can - # reproduce results locally without piecing together a command based on the YAML. - executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1' - # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named - # 'continueOnError', the parameter value is not correctly picked up. 
- # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter - # optional: determines whether to continue the build if the step errors; - sdlContinueOnError: false - -steps: -- template: /eng/common/templates/steps/execute-sdl.yml - parameters: - overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }} - executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }} - overrideParameters: ${{ parameters.overrideParameters }} - additionalParameters: '${{ parameters.additionalParameters }} - -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")' - publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }} - sdlContinueOnError: ${{ parameters.sdlContinueOnError }} \ No newline at end of file diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml deleted file mode 100644 index 07426fde05d82..0000000000000 --- a/eng/common/templates/steps/execute-sdl.yml +++ /dev/null @@ -1,88 +0,0 @@ -parameters: - overrideGuardianVersion: '' - executeAllSdlToolsScript: '' - overrideParameters: '' - additionalParameters: '' - publishGuardianDirectoryToPipeline: false - sdlContinueOnError: false - condition: '' - -steps: -- task: NuGetAuthenticate@1 - inputs: - nuGetServiceConnections: GuardianConnect - -- task: NuGetToolInstaller@1 - displayName: 'Install NuGet.exe' - -- ${{ if ne(parameters.overrideGuardianVersion, '') }}: - - pwsh: | - Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl - . .\sdl.ps1 - $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }} - Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" - displayName: Install Guardian (Overridden) - -- ${{ if eq(parameters.overrideGuardianVersion, '') }}: - - pwsh: | - Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl - . .\sdl.ps1 - $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts - Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation" - displayName: Install Guardian - -- ${{ if ne(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }} - displayName: Execute SDL (Overridden) - continueOnError: ${{ parameters.sdlContinueOnError }} - condition: ${{ parameters.condition }} - -- ${{ if eq(parameters.overrideParameters, '') }}: - - powershell: ${{ parameters.executeAllSdlToolsScript }} - -GuardianCliLocation $(GuardianCliLocation) - -NugetPackageDirectory $(Build.SourcesDirectory)\.packages - -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw) - ${{ parameters.additionalParameters }} - displayName: Execute SDL - continueOnError: ${{ parameters.sdlContinueOnError }} - condition: ${{ parameters.condition }} - -- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}: - # We want to publish the Guardian results and configuration for easy diagnosis. However, the - # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default - # tooling files. Some of these files are large and aren't useful during an investigation, so - # exclude them by simply deleting them before publishing. (As of writing, there is no documented - # way to selectively exclude a dir from the pipeline artifact publish task.) 
- - task: DeleteFiles@1 - displayName: Delete Guardian dependencies to avoid uploading - inputs: - SourceFolder: $(Agent.BuildDirectory)/.gdn - Contents: | - c - i - condition: succeededOrFailed() - - - publish: $(Agent.BuildDirectory)/.gdn - artifact: GuardianConfiguration - displayName: Publish GuardianConfiguration - condition: succeededOrFailed() - - # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration - # with the "SARIF SAST Scans Tab" Azure DevOps extension - - task: CopyFiles@2 - displayName: Copy SARIF files - inputs: - flattenFolders: true - sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/ - contents: '**/*.sarif' - targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs - condition: succeededOrFailed() - - # Use PublishBuildArtifacts because the SARIF extension only checks this case - # see microsoft/sarif-azuredevops-extension#4 - - task: PublishBuildArtifacts@1 - displayName: Publish SARIF files to CodeAnalysisLogs container - inputs: - pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs - artifactName: CodeAnalysisLogs - condition: succeededOrFailed() \ No newline at end of file diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml index 2b21eae427328..26dc00a2e0f31 100644 --- a/eng/common/templates/steps/generate-sbom.yml +++ b/eng/common/templates/steps/generate-sbom.yml @@ -1,48 +1,7 @@ -# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. -# PackageName - The name of the package this SBOM represents. -# PackageVersion - The version of the package this SBOM represents. -# ManifestDirPath - The path of the directory where the generated manifest files will be placed -# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
- -parameters: - PackageVersion: 8.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' - PackageName: '.NET' - ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom - IgnoreDirectories: '' - sbomContinueOnError: true - steps: -- task: PowerShell@2 - displayName: Prep for SBOM generation in (Non-linux) - condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) - inputs: - filePath: ./eng/common/generate-sbom-prep.ps1 - arguments: ${{parameters.manifestDirPath}} - -# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 -- script: | - chmod +x ./eng/common/generate-sbom-prep.sh - ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} - displayName: Prep for SBOM generation in (Linux) - condition: eq(variables['Agent.Os'], 'Linux') - continueOnError: ${{ parameters.sbomContinueOnError }} - -- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 - displayName: 'Generate SBOM manifest' - continueOnError: ${{ parameters.sbomContinueOnError }} - inputs: - PackageName: ${{ parameters.packageName }} - BuildDropPath: ${{ parameters.buildDropPath }} - PackageVersion: ${{ parameters.packageVersion }} - ManifestDirPath: ${{ parameters.manifestDirPath }} - ${{ if ne(parameters.IgnoreDirectories, '') }}: - AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' - -- task: PublishPipelineArtifact@1 - displayName: Publish SBOM manifest - continueOnError: ${{parameters.sbomContinueOnError}} - inputs: - targetPath: '${{parameters.manifestDirPath}}' - artifactName: $(ARTIFACT_NAME) +- template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: false + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/publish-build-artifacts.yml b/eng/common/templates/steps/publish-build-artifacts.yml new file mode 100644 index 0000000000000..6428a98dfef68 --- /dev/null +++ b/eng/common/templates/steps/publish-build-artifacts.yml @@ -0,0 +1,40 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: displayName + type: string + default: 'Publish to Build Artifact' + +- name: condition + type: string + default: succeeded() + +- name: artifactName + type: string + +- name: pathToPublish + type: string + +- name: continueOnError + type: boolean + default: false + +- name: publishLocation + type: string + default: 'Container' + +steps: +- ${{ if eq(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates cannot be referenced from a 1ES managed template': error +- task: PublishBuildArtifacts@1 + displayName: ${{ parameters.displayName }} + condition: ${{ parameters.condition }} + ${{ if parameters.continueOnError }}: + continueOnError: ${{ parameters.continueOnError }} + inputs: + PublishLocation: ${{ parameters.publishLocation }} + PathtoPublish: ${{ parameters.pathToPublish }} + ${{ if parameters.artifactName }}: + ArtifactName: ${{ parameters.artifactName }} \ No newline at end of file diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml index 88f238f36bfd8..4ea86bd882355 100644 --- a/eng/common/templates/steps/publish-logs.yml +++ b/eng/common/templates/steps/publish-logs.yml @@ -1,23 +1,7 @@ -parameters: - StageLabel: '' - JobLabel: '' - steps: -- task: Powershell@2 - displayName: Prepare Binlogs to Upload - inputs: - targetType: inline - script: | - New-Item -ItemType Directory 
$(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - continueOnError: true - condition: always() +- template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: false -- task: PublishBuildArtifacts@1 - displayName: Publish Logs - inputs: - PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs' - PublishLocation: Container - ArtifactName: PostBuildLogs - continueOnError: true - condition: always() + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/publish-pipeline-artifacts.yml b/eng/common/templates/steps/publish-pipeline-artifacts.yml new file mode 100644 index 0000000000000..5dd698b212fc6 --- /dev/null +++ b/eng/common/templates/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,34 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: args + type: object + default: {} + +steps: +- ${{ if eq(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates cannot be referenced from a 1ES managed template': error +- task: PublishPipelineArtifact@1 + displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} + ${{ if parameters.args.condition }}: + condition: ${{ parameters.args.condition }} + ${{ else }}: + condition: succeeded() + ${{ if parameters.args.continueOnError }}: + continueOnError: ${{ parameters.args.continueOnError }} + inputs: + targetPath: ${{ parameters.args.targetPath }} + ${{ if parameters.args.artifactName }}: + artifactName: ${{ parameters.args.artifactName }} + ${{ if parameters.args.publishLocation }}: + publishLocation: ${{ parameters.args.publishLocation }} + ${{ if parameters.args.fileSharePath }}: + fileSharePath: ${{ parameters.args.fileSharePath }} + ${{ if parameters.args.Parallel }}: + parallel: ${{ parameters.args.Parallel }} + ${{ if parameters.args.parallelCount }}: + parallelCount: ${{ parameters.args.parallelCount }} + ${{ if parameters.args.properties }}: + properties: ${{ parameters.args.properties }} \ No newline at end of file diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml index 83d97a26a01ff..8e841ace3d293 100644 --- a/eng/common/templates/steps/retain-build.yml +++ b/eng/common/templates/steps/retain-build.yml @@ -1,28 +1,7 @@ -parameters: - # Optional azure devops PAT with build execute permissions for the build's organization, - # only needed if the build that should be retained ran on a different organization than - # the pipeline where this template is executing from - Token: '' - # Optional BuildId to retain, defaults to the current running build - BuildId: '' - # Azure devops Organization URI for the build in the https://dev.azure.com/ format. - # Defaults to the organization the current pipeline is running on - AzdoOrgUri: '$(System.CollectionUri)' - # Azure devops project for the build. 
Defaults to the project the current pipeline is running on - AzdoProject: '$(System.TeamProject)' - steps: - - task: powershell@2 - inputs: - targetType: 'filePath' - filePath: eng/common/retain-build.ps1 - pwsh: true - arguments: > - -AzdoOrgUri: ${{parameters.AzdoOrgUri}} - -AzdoProject ${{parameters.AzdoProject}} - -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} - -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} - displayName: Enable permanent build retention - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - BUILD_ID: $(Build.BuildId) \ No newline at end of file +- template: /eng/common/core-templates/steps/retain-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml deleted file mode 100644 index e1733814f65dc..0000000000000 --- a/eng/common/templates/steps/run-on-unix.yml +++ /dev/null @@ -1,7 +0,0 @@ -parameters: - agentOs: '' - steps: [] - -steps: -- ${{ if ne(parameters.agentOs, 'Windows_NT') }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml deleted file mode 100644 index 73e7e9c275a1f..0000000000000 --- a/eng/common/templates/steps/run-on-windows.yml +++ /dev/null @@ -1,7 +0,0 @@ -parameters: - agentOs: '' - steps: [] - -steps: -- ${{ if eq(parameters.agentOs, 'Windows_NT') }}: - - ${{ parameters.steps }} diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml deleted file mode 100644 index 3d1242f5587c8..0000000000000 --- a/eng/common/templates/steps/run-script-ifequalelse.yml +++ /dev/null @@ -1,33 +0,0 @@ -parameters: - # if parameter1 equals parameter 2, run 'ifScript' command, else run 'elsescript' command - parameter1: '' - parameter2: '' - ifScript: '' - elseScript: '' - - # name of script step - name: Script - - # display name of script step - displayName: If-Equal-Else Script - - # environment - env: {} - - # conditional expression for step execution - condition: '' - -steps: -- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}: - - script: ${{ parameters.ifScript }} - name: ${{ parameters.name }} - displayName: ${{ parameters.displayName }} - env: ${{ parameters.env }} - condition: ${{ parameters.condition }} - -- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}: - - script: ${{ parameters.elseScript }} - name: ${{ parameters.name }} - displayName: ${{ parameters.displayName }} - env: ${{ parameters.env }} - condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml index 3eb7e2d5f840c..39f99fc2762d0 100644 --- a/eng/common/templates/steps/send-to-helix.yml +++ b/eng/common/templates/steps/send-to-helix.yml @@ -1,91 +1,7 @@ -# Please remember to update the documentation if you make changes to these parameters! 
-parameters: - HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ - HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' - HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number - HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues - HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group - HelixConfiguration: '' # optional -- additional property attached to a job - HelixPreCommands: '' # optional -- commands to run before Helix work item execution - HelixPostCommands: '' # optional -- commands to run after Helix work item execution - WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects - WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects - WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects - CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload - XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true - XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects - XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects - XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner - XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects - IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion - DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json - WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." 
- IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set - HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) - Creator: '' # optional -- if the build is external, use this to specify who is sending the job - DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO - condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() - continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false - steps: - - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' - displayName: ${{ parameters.DisplayNamePrefix }} (Windows) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} - - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog - displayName: ${{ parameters.DisplayNamePrefix }} (Unix) - env: - BuildConfig: $(_BuildConfig) - HelixSource: ${{ parameters.HelixSource }} - HelixType: ${{ parameters.HelixType }} - HelixBuild: ${{ parameters.HelixBuild }} - HelixConfiguration: ${{ parameters.HelixConfiguration }} - HelixTargetQueues: ${{ parameters.HelixTargetQueues }} - HelixAccessToken: ${{ parameters.HelixAccessToken }} - HelixPreCommands: ${{ parameters.HelixPreCommands }} - HelixPostCommands: ${{ parameters.HelixPostCommands }} - WorkItemDirectory: ${{ parameters.WorkItemDirectory }} - WorkItemCommand: ${{ parameters.WorkItemCommand }} - WorkItemTimeout: ${{ parameters.WorkItemTimeout }} - CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} - XUnitProjects: ${{ 
parameters.XUnitProjects }} - XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} - XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} - XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} - XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} - IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} - DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} - DotNetCliVersion: ${{ parameters.DotNetCliVersion }} - WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} - HelixBaseUri: ${{ parameters.HelixBaseUri }} - Creator: ${{ parameters.Creator }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) - continueOnError: ${{ parameters.continueOnError }} +- template: /eng/common/core-templates/steps/send-to-helix.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml index 41bbb915736a6..23c1d6f4e9f8d 100644 --- a/eng/common/templates/steps/source-build.yml +++ b/eng/common/templates/steps/source-build.yml @@ -1,129 +1,7 @@ -parameters: - # This template adds arcade-powered source-build to CI. - - # This is a 'steps' template, and is intended for advanced scenarios where the existing build - # infra has a careful build methodology that must be followed. For example, a repo - # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline - # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to - # GitHub. Using this steps template leaves room for that infra to be included. - - # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml' - # for details. The entire object is described in the 'job' template for simplicity, even though - # the usage of the properties on this object is split between the 'job' and 'steps' templates. - platform: {} - steps: -# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.) -- script: | - set -x - df -h - - # If building on the internal project, the artifact feeds variable may be available (usually only if needed) - # In that case, call the feed setup script to add internal feeds corresponding to public ones. - # In addition, add an msbuild argument to copy the WIP from the repo to the target build location. - # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those - # changes. - internalRestoreArgs= - if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then - # Temporarily work around https://github.com/dotnet/arcade/issues/7709 - chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh - $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw) - internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' - - # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. - # This only works if there is a username/email configured, which won't be the case in most CI runs. - git config --get user.email - if [ $? 
-ne 0 ]; then - git config user.email dn-bot@microsoft.com - git config user.name dn-bot - fi - fi - - # If building on the internal project, the internal storage variable may be available (usually only if needed) - # In that case, add variables to allow the download of internal runtimes if the specified versions are not found - # in the default public locations. - internalRuntimeDownloadArgs= - if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' - fi - - buildConfig=Release - # Check if AzDO substitutes in a build config from a variable, and use it if so. - if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then - buildConfig='$(_BuildConfig)' - fi - - officialBuildArgs= - if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then - officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' - fi - - targetRidArgs= - if [ '${{ parameters.platform.targetRID }}' != '' ]; then - targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' - fi - - runtimeOsArgs= - if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then - runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' - fi - - baseOsArgs= - if [ '${{ parameters.platform.baseOS }}' != '' ]; then - baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' - fi - - publishArgs= - if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then - publishArgs='--publish' - fi - - assetManifestFileName=SourceBuild_RidSpecific.xml - if [ '${{ parameters.platform.name }}' != '' ]; then - assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml - fi - - ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ - --configuration $buildConfig \ - --restore --build --pack $publishArgs -bl \ - $officialBuildArgs \ - $internalRuntimeDownloadArgs \ - $internalRestoreArgs \ - $targetRidArgs \ - $runtimeOsArgs \ - $baseOsArgs \ - /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ - /p:ArcadeBuildFromSource=true \ - /p:AssetManifestFileName=$assetManifestFileName - displayName: Build - -# Upload build logs for diagnosis. -- task: CopyFiles@2 - displayName: Prepare BuildLogs staging directory - inputs: - SourceFolder: '$(Build.SourcesDirectory)' - Contents: | - **/*.log - **/*.binlog - artifacts/source-build/self/prebuilt-report/** - TargetFolder: '$(Build.StagingDirectory)/BuildLogs' - CleanTargetFolder: true - continueOnError: true - condition: succeededOrFailed() - -- task: PublishPipelineArtifact@1 - displayName: Publish BuildLogs - inputs: - targetPath: '$(Build.StagingDirectory)/BuildLogs' - artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) - continueOnError: true - condition: succeededOrFailed() +- template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: false -# Manually inject component detection so that we can ignore the source build upstream cache, which contains -# a nupkg cache of input packages (a local feed). 
-# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' -# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets -- task: ComponentGovernanceComponentDetection@0 - displayName: Component Detection (Exclude upstream cache) - inputs: - ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache' + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml deleted file mode 100644 index fadc04ca1b9a3..0000000000000 --- a/eng/common/templates/steps/telemetry-end.yml +++ /dev/null @@ -1,102 +0,0 @@ -parameters: - maxRetries: 5 - retryDelay: 10 # in seconds - -steps: -- bash: | - if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then - errorCount=0 - else - errorCount=1 - fi - warningCount=0 - - curlStatus=1 - retryCount=0 - # retry loop to harden against spotty telemetry connections - # we don't retry successes and 4xx client errors - until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]] - do - if [ $retryCount -gt 0 ]; then - echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..." - sleep $RetryDelay - fi - - # create a temporary file for curl output - res=`mktemp` - - curlResult=` - curl --verbose --output $res --write-out "%{http_code}"\ - -H 'Content-Type: application/json' \ - -H "X-Helix-Job-Token: $Helix_JobToken" \ - -H 'Content-Length: 0' \ - -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \ - --data-urlencode "errorCount=$errorCount" \ - --data-urlencode "warningCount=$warningCount"` - curlStatus=$? 
- - if [ $curlStatus -eq 0 ]; then - if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then - curlStatus=$curlResult - fi - fi - - let retryCount++ - done - - if [ $curlStatus -ne 0 ]; then - echo "Failed to Send Build Finish information after $retryCount retries" - vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus" - echo "##$vstsLogOutput" - exit 1 - fi - displayName: Send Unix Build End Telemetry - env: - # defined via VSTS variables in start-job.sh - Helix_JobToken: $(Helix_JobToken) - Helix_WorkItemId: $(Helix_WorkItemId) - MaxRetries: ${{ parameters.maxRetries }} - RetryDelay: ${{ parameters.retryDelay }} - condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT')) -- powershell: | - if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) { - $ErrorCount = 0 - } else { - $ErrorCount = 1 - } - $WarningCount = 0 - - # Basic retry loop to harden against server flakiness - $retryCount = 0 - while ($retryCount -lt $env:MaxRetries) { - try { - Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" ` - -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken } - break - } - catch { - $statusCode = $_.Exception.Response.StatusCode.value__ - if ($statusCode -ge 400 -and $statusCode -le 499) { - Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)" - Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message - exit 1 - } - Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..." - $retryCount++ - sleep $env:RetryDelay - continue - } - } - - if ($retryCount -ge $env:MaxRetries) { - Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries." 
- exit 1 - } - displayName: Send Windows Build End Telemetry - env: - # defined via VSTS variables in start-job.ps1 - Helix_JobToken: $(Helix_JobToken) - Helix_WorkItemId: $(Helix_WorkItemId) - MaxRetries: ${{ parameters.maxRetries }} - RetryDelay: ${{ parameters.retryDelay }} - condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT')) diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml deleted file mode 100644 index 32c01ef0b553b..0000000000000 --- a/eng/common/templates/steps/telemetry-start.yml +++ /dev/null @@ -1,241 +0,0 @@ -parameters: - helixSource: 'undefined_defaulted_in_telemetry.yml' - helixType: 'undefined_defaulted_in_telemetry.yml' - buildConfig: '' - runAsPublic: false - maxRetries: 5 - retryDelay: 10 # in seconds - -steps: -- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}: - - task: AzureKeyVault@1 - inputs: - azureSubscription: 'HelixProd_KeyVault' - KeyVaultName: HelixProdKV - SecretsFilter: 'HelixApiAccessToken' - condition: always() -- bash: | - # create a temporary file - jobInfo=`mktemp` - - # write job info content to temporary file - cat > $jobInfo < /dev/null; then echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation" - echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 + echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true fi echo "Will now retry the same URL with verbose logging." with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || { @@ -343,7 +341,7 @@ function InitializeBuildTool { _InitializeBuildToolCommand="msbuild" # use override if it exists - commonly set by source-build if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then - _InitializeBuildToolFramework="net8.0" + _InitializeBuildToolFramework="net9.0" else _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}" fi @@ -458,12 +456,10 @@ function MSBuild { local possiblePaths=() possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" ) possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" ) - possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" ) - possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" ) possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" ) possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" ) for path in "${possiblePaths[@]}"; do if [[ -f $path ]]; then selectedPath=$path @@ -510,7 +506,8 @@ function MSBuild-Core { echo "Build failed with exit code $exit_code. Check errors above." # When running on Azure Pipelines, override the returned exit code to avoid double logging. - if [[ "$ci" == "true" && -n ${SYSTEM_TEAMPROJECT:-} ]]; then + # Skip this when the build is a child of the VMR orchestrator build. 
+ if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error diff --git a/eng/targets/Settings.props b/eng/targets/Settings.props index 99698db200629..3598b4bdc7671 100644 --- a/eng/targets/Settings.props +++ b/eng/targets/Settings.props @@ -58,6 +58,8 @@ false true + + <_SkipUpgradeNetAnalyzersNuGetWarning>true diff --git a/eng/targets/TargetFrameworks.props b/eng/targets/TargetFrameworks.props index 20b3ce78a455f..8493c1046cb51 100644 --- a/eng/targets/TargetFrameworks.props +++ b/eng/targets/TargetFrameworks.props @@ -18,6 +18,7 @@ net8.0 net7.0;net8.0 net6.0 + net9.0 + + $(NetMinimum) $(NetPrevious) $(NetCurrent);$(NetPrevious) $(NetCurrent);$(NetPrevious) $(NetPrevious) + $(NetPrevious) @@ -52,6 +56,7 @@ $(NetCurrent);$(NetPrevious) $(NetCurrent);$(NetPrevious) $(NetCurrent) + $(NetCurrent) diff --git a/global.json b/global.json index dd2c371a82896..1ad426b264b05 100644 --- a/global.json +++ b/global.json @@ -1,19 +1,19 @@ { "sdk": { - "version": "8.0.101", + "version": "9.0.100-preview.3.24204.13", "allowPrerelease": false, "rollForward": "patch" }, "tools": { - "dotnet": "8.0.101", + "dotnet": "9.0.100-preview.3.24204.13", "vs": { "version": "17.8.0" }, "xcopy-msbuild": "17.8.1-2" }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.24225.1", - "Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.24225.1", + "Microsoft.DotNet.Arcade.Sdk": "9.0.0-beta.24265.1", + "Microsoft.DotNet.Helix.Sdk": "9.0.0-beta.24265.1", "Microsoft.Build.Traversal": "3.4.0" } } diff --git a/src/Compilers/CSharp/Test/Emit2/Emit/EditAndContinue/EditAndContinueTests.cs b/src/Compilers/CSharp/Test/Emit2/Emit/EditAndContinue/EditAndContinueTests.cs index 04b2a6d0025ee..41ff3e1fd848c 100644 --- a/src/Compilers/CSharp/Test/Emit2/Emit/EditAndContinue/EditAndContinueTests.cs +++ b/src/Compilers/CSharp/Test/Emit2/Emit/EditAndContinue/EditAndContinueTests.cs @@ -1036,14 +1036,29 @@ static void G() {} Row(10, TableIndex.CustomAttribute, EditAndContinueOperation.Default),// add new row ]); - g.VerifyCustomAttributes( - [ - new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(5, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(6, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)), - ]); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(5, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(6, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)), + ]); + } + else + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(8, 
TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(5, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(8, TableIndex.MethodDef), Handle(6, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)), + ]); + } }) .Verify(); } @@ -1422,15 +1437,32 @@ [A7]void H() { } Handle(9, TableIndex.CustomAttribute), ]); - g.VerifyCustomAttributes( - [ - new CustomAttributeRow(Handle(9, TableIndex.MethodDef), Handle(2, TableIndex.MethodDef)), // F [A1] -> [A2] - new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // F [A2] delete - new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)),// G [A3] -> [A4] - new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)),// H [A6] -> [A7] - new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // H [A5] delete - new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)),// G [A3] add with RowId 9 - ]); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // F [A2] delete + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // H [A5] delete + new CustomAttributeRow(Handle(9, TableIndex.MethodDef), Handle(2, TableIndex.MethodDef)), // F [A1] -> [A2] + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)),// G [A3] -> [A4] + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)),// G [A3] add with RowId 9 + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)),// H [A6] -> [A7] + ]); + } + else + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(9, TableIndex.MethodDef), Handle(2, TableIndex.MethodDef)), // F [A1] -> [A2] + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // F [A2] delete + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)),// G [A3] -> [A4] + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)),// H [A6] -> [A7] + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // H [A5] delete + new CustomAttributeRow(Handle(10, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)),// G [A3] add with RowId 9 + ]); + + } }) .AddGeneration( source: common + """ @@ -1472,15 +1504,32 @@ void G() { } Handle(11, TableIndex.CustomAttribute), ]); - g.VerifyCustomAttributes( - [ - new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // G [A4] delete - new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(5, TableIndex.MethodDef)), // H [A5] - new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(6, TableIndex.MethodDef)), // H [A6] - new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // G [A3] delete - new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)), // H [A7] add with RowId 10 - new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(8, 
TableIndex.MethodDef)), // H [A8] add with RowId 11 - ]); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // G [A4] delete + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // G [A3] delete + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(5, TableIndex.MethodDef)), // H [A5] + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(6, TableIndex.MethodDef)), // H [A6] + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)), // H [A7] add with RowId 10 + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(8, TableIndex.MethodDef)), // H [A8] add with RowId 11 + ]); + } + else + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // G [A4] delete + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(5, TableIndex.MethodDef)), // H [A5] + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(6, TableIndex.MethodDef)), // H [A6] + new CustomAttributeRow(Handle(0, TableIndex.MethodDef), Handle(0, TableIndex.MemberRef)), // G [A3] delete + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(7, TableIndex.MethodDef)), // H [A7] add with RowId 10 + new CustomAttributeRow(Handle(11, TableIndex.MethodDef), Handle(8, TableIndex.MethodDef)), // H [A8] add with RowId 11 + ]); + + } }) .AddGeneration( source: common + """ @@ -1648,11 +1697,23 @@ [A4] void G() { } Handle(5, TableIndex.CustomAttribute), ]); - g.VerifyCustomAttributes( - [ - new CustomAttributeRow(Handle(6, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)), // F: [A1] -> [A3] - new CustomAttributeRow(Handle(5, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)), // G: [A2] -> [A4] - ]); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(5, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)), // G: [A2] -> [A4] + new CustomAttributeRow(Handle(6, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)), // F: [A1] -> [A3] + ]); + } + else + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(6, TableIndex.MethodDef), Handle(4, TableIndex.MethodDef)), // F: [A1] -> [A3] + new CustomAttributeRow(Handle(5, TableIndex.MethodDef), Handle(3, TableIndex.MethodDef)), // G: [A2] -> [A4] + ]); + } }) .Verify(); } @@ -3024,15 +3085,23 @@ class C g.VerifyTypeDefNames("E", "C", "D"); g.VerifyMethodDefNames(); - g.VerifyCustomAttributes( - [ - new CustomAttributeRow(Handle(14, TableIndex.TypeDef), Handle(1, TableIndex.MethodDef)), // E - new CustomAttributeRow(Handle(15, TableIndex.TypeDef), Handle(3, TableIndex.MethodDef)), // C - new CustomAttributeRow(Handle(16, TableIndex.TypeDef), Handle(6, TableIndex.MethodDef)), // D - new CustomAttributeRow(Handle(2, TableIndex.Field), Handle(2, TableIndex.MethodDef)), // E.A - new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(4, TableIndex.MethodDef)), // _x - new CustomAttributeRow(Handle(1, TableIndex.Property), Handle(5, TableIndex.MethodDef)) // X - ]); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(1, TableIndex.Property), Handle(5, 
TableIndex.MethodDef)), // X + new CustomAttributeRow(Handle(2, TableIndex.Field), Handle(2, TableIndex.MethodDef)), // E.A + new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(4, TableIndex.MethodDef)), // _x + new CustomAttributeRow(Handle(14, TableIndex.TypeDef), Handle(1, TableIndex.MethodDef)), // E + new CustomAttributeRow(Handle(15, TableIndex.TypeDef), Handle(3, TableIndex.MethodDef)), // C + new CustomAttributeRow(Handle(16, TableIndex.TypeDef), Handle(6, TableIndex.MethodDef)), // D + ]); + } + else + { + + } g.VerifyEncLogDefinitions( [ @@ -3105,15 +3174,31 @@ class C g.VerifyTypeDefNames("E", "C", "D"); g.VerifyMethodDefNames(); - g.VerifyCustomAttributes( - [ - new CustomAttributeRow(Handle(14, TableIndex.TypeDef), Handle(7, TableIndex.MethodDef)), // E - new CustomAttributeRow(Handle(15, TableIndex.TypeDef), Handle(9, TableIndex.MethodDef)), // C - new CustomAttributeRow(Handle(16, TableIndex.TypeDef), Handle(12, TableIndex.MethodDef)),// D - new CustomAttributeRow(Handle(2, TableIndex.Field), Handle(8, TableIndex.MethodDef)), // E.A - new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(10, TableIndex.MethodDef)), // _x - new CustomAttributeRow(Handle(1, TableIndex.Property), Handle(11, TableIndex.MethodDef)) // X - ]); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(1, TableIndex.Property), Handle(11, TableIndex.MethodDef)),// X + new CustomAttributeRow(Handle(2, TableIndex.Field), Handle(8, TableIndex.MethodDef)), // E.A + new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(10, TableIndex.MethodDef)), // _x + new CustomAttributeRow(Handle(14, TableIndex.TypeDef), Handle(7, TableIndex.MethodDef)), // E + new CustomAttributeRow(Handle(15, TableIndex.TypeDef), Handle(9, TableIndex.MethodDef)), // C + new CustomAttributeRow(Handle(16, TableIndex.TypeDef), Handle(12, TableIndex.MethodDef)),// D + ]); + } + else + { + g.VerifyCustomAttributes( + [ + new CustomAttributeRow(Handle(14, TableIndex.TypeDef), Handle(7, TableIndex.MethodDef)), // E + new CustomAttributeRow(Handle(15, TableIndex.TypeDef), Handle(9, TableIndex.MethodDef)), // C + new CustomAttributeRow(Handle(16, TableIndex.TypeDef), Handle(12, TableIndex.MethodDef)),// D + new CustomAttributeRow(Handle(2, TableIndex.Field), Handle(8, TableIndex.MethodDef)), // E.A + new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(10, TableIndex.MethodDef)), // _x + new CustomAttributeRow(Handle(1, TableIndex.Property), Handle(11, TableIndex.MethodDef)),// X + ]); + } g.VerifyEncLogDefinitions(new[] { @@ -6964,16 +7049,33 @@ [B] static void M2<[A]T>() { } Handle(6, TableIndex.MethodSemantics), Handle(2, TableIndex.GenericParam)); - CheckAttributes(reader1, - new CustomAttributeRow(Handle(1, TableIndex.GenericParam), Handle(1, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(1, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(4, TableIndex.Field), Handle(11, TableIndex.MemberRef)), - new CustomAttributeRow(Handle(4, TableIndex.Field), Handle(12, TableIndex.MemberRef)), - new CustomAttributeRow(Handle(12, TableIndex.MethodDef), Handle(2, TableIndex.MethodDef)), - new CustomAttributeRow(Handle(14, TableIndex.MethodDef), Handle(11, TableIndex.MemberRef)), - new CustomAttributeRow(Handle(15, TableIndex.MethodDef), Handle(11, TableIndex.MemberRef)), - new CustomAttributeRow(Handle(2, TableIndex.Event), Handle(1, TableIndex.MethodDef)), - new 
CustomAttributeRow(Handle(2, TableIndex.Property), Handle(2, TableIndex.MethodDef))); + // https://github.com/dotnet/roslyn/issues/73513 + if (RuntimeUtilities.IsCoreClr9OrHigherRuntime) + { + CheckAttributes(reader1, + new CustomAttributeRow(Handle(1, TableIndex.GenericParam), Handle(1, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(2, TableIndex.Property), Handle(2, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(2, TableIndex.Event), Handle(1, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(1, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(4, TableIndex.Field), Handle(11, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(4, TableIndex.Field), Handle(12, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(12, TableIndex.MethodDef), Handle(2, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(14, TableIndex.MethodDef), Handle(11, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(15, TableIndex.MethodDef), Handle(11, TableIndex.MemberRef))); + } + else + { + CheckAttributes(reader1, + new CustomAttributeRow(Handle(1, TableIndex.GenericParam), Handle(1, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(3, TableIndex.Field), Handle(1, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(4, TableIndex.Field), Handle(11, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(4, TableIndex.Field), Handle(12, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(12, TableIndex.MethodDef), Handle(2, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(14, TableIndex.MethodDef), Handle(11, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(15, TableIndex.MethodDef), Handle(11, TableIndex.MemberRef)), + new CustomAttributeRow(Handle(2, TableIndex.Event), Handle(1, TableIndex.MethodDef)), + new CustomAttributeRow(Handle(2, TableIndex.Property), Handle(2, TableIndex.MethodDef))); + } } /// diff --git a/src/Compilers/CSharp/Test/Emit2/Emit/LocalStateTracing/LocalStateTracingTests.cs b/src/Compilers/CSharp/Test/Emit2/Emit/LocalStateTracing/LocalStateTracingTests.cs index 4315771543ce5..000ddcb8d0bce 100644 --- a/src/Compilers/CSharp/Test/Emit2/Emit/LocalStateTracing/LocalStateTracingTests.cs +++ b/src/Compilers/CSharp/Test/Emit2/Emit/LocalStateTracing/LocalStateTracingTests.cs @@ -5520,7 +5520,7 @@ static void Main() Main: Entered Main: L'a' = 1 Main: L'b' = 2 -Main: L4 = System.Linq.Enumerable+SelectArrayIterator`2[System.Int32,System.Int32] +Main: L4 = System.Linq.Enumerable+ArraySelectIterator`2[System.Int32,System.Int32] Main: Entered lambda '
b__0'
b__0: P'item'[0] = 10
b__0: Returned diff --git a/src/Compilers/CSharp/Test/Emit2/Microsoft.CodeAnalysis.CSharp.Emit2.UnitTests.csproj b/src/Compilers/CSharp/Test/Emit2/Microsoft.CodeAnalysis.CSharp.Emit2.UnitTests.csproj index 3276fae69d6c0..aa297ca0ff6be 100644 --- a/src/Compilers/CSharp/Test/Emit2/Microsoft.CodeAnalysis.CSharp.Emit2.UnitTests.csproj +++ b/src/Compilers/CSharp/Test/Emit2/Microsoft.CodeAnalysis.CSharp.Emit2.UnitTests.csproj @@ -4,7 +4,7 @@ Library Microsoft.CodeAnalysis.CSharp.UnitTests - $(NetRoslyn);net472 + $(NetRoslynNext);net472 true diff --git a/src/Compilers/Core/CodeAnalysisTest/Microsoft.CodeAnalysis.UnitTests.csproj b/src/Compilers/Core/CodeAnalysisTest/Microsoft.CodeAnalysis.UnitTests.csproj index e80335a1d132f..f3d038eafe202 100644 --- a/src/Compilers/Core/CodeAnalysisTest/Microsoft.CodeAnalysis.UnitTests.csproj +++ b/src/Compilers/Core/CodeAnalysisTest/Microsoft.CodeAnalysis.UnitTests.csproj @@ -5,7 +5,7 @@ Library Microsoft.CodeAnalysis.UnitTests true - $(NetRoslyn);net472 + $(NetRoslynNext);net472 diff --git a/src/Compilers/Test/Core/Compilation/RuntimeUtilities.cs b/src/Compilers/Test/Core/Compilation/RuntimeUtilities.cs index 57825e1362a3f..6e29d5625fc80 100644 --- a/src/Compilers/Test/Core/Compilation/RuntimeUtilities.cs +++ b/src/Compilers/Test/Core/Compilation/RuntimeUtilities.cs @@ -27,14 +27,19 @@ public static partial class RuntimeUtilities #endif internal static bool IsCoreClrRuntime => !IsDesktopRuntime; + private static int? CoreClrRuntimeVersion { get; } = IsDesktopRuntime + ? null + : typeof(object).Assembly.GetName().Version.Major; + internal static bool IsCoreClr6Runtime => IsCoreClrRuntime && RuntimeInformation.FrameworkDescription.StartsWith(".NET 6.", StringComparison.Ordinal); internal static bool IsCoreClr8OrHigherRuntime - => IsCoreClrRuntime && RuntimeInformation.FrameworkDescription.StartsWith(".NET 8.", StringComparison.Ordinal); + => CoreClrRuntimeVersion is { } v && v >= 8; internal static bool IsCoreClr9OrHigherRuntime - => IsCoreClrRuntime && RuntimeInformation.FrameworkDescription.StartsWith(".NET 9.", StringComparison.Ordinal); + => CoreClrRuntimeVersion is { } v && v >= 9; + #if NET9_0_OR_GREATER #error Make the above check be an #if NET9_OR_GREATER when we add net8 support to build #endif diff --git a/src/Features/CSharpTest/Microsoft.CodeAnalysis.CSharp.Features.UnitTests.csproj b/src/Features/CSharpTest/Microsoft.CodeAnalysis.CSharp.Features.UnitTests.csproj index 801b6a5a0c0e4..73c7105b1f10e 100644 --- a/src/Features/CSharpTest/Microsoft.CodeAnalysis.CSharp.Features.UnitTests.csproj +++ b/src/Features/CSharpTest/Microsoft.CodeAnalysis.CSharp.Features.UnitTests.csproj @@ -3,7 +3,7 @@ Library - $(NetRoslyn);net472 + $(NetRoslynNext);net472 Microsoft.CodeAnalysis.CSharp.UnitTests true diff --git a/src/Features/CSharpTest/SemanticSearch/CSharpSemanticSearchServiceTests.cs b/src/Features/CSharpTest/SemanticSearch/CSharpSemanticSearchServiceTests.cs index 02d2a79a8fc74..c841e11de33b6 100644 --- a/src/Features/CSharpTest/SemanticSearch/CSharpSemanticSearchServiceTests.cs +++ b/src/Features/CSharpTest/SemanticSearch/CSharpSemanticSearchServiceTests.cs @@ -240,7 +240,7 @@ static ISymbol F(ISymbol s) AssertEx.Equal( $" at Program.<
<Main>$>g__F|0_1(ISymbol s) in {FeaturesResources.Query}:line 11" + Environment.NewLine + $" at Program.<>c.<<Main>
$>b__0_2(ISymbol x) in {FeaturesResources.Query}:line 5" + Environment.NewLine + - $" at System.Linq.Enumerable.SelectArrayIterator`2.MoveNext()" + Environment.NewLine, + $" at System.Linq.Enumerable.ArraySelectIterator`2.MoveNext()" + Environment.NewLine, exception.StackTrace.JoinText()); } } diff --git a/src/Scripting/CSharpTest/CommandLineRunnerTests.cs b/src/Scripting/CSharpTest/CommandLineRunnerTests.cs index 0d50eb1e902cc..bdf2e78cd6788 100644 --- a/src/Scripting/CSharpTest/CommandLineRunnerTests.cs +++ b/src/Scripting/CSharpTest/CommandLineRunnerTests.cs @@ -121,6 +121,9 @@ select x * x "); runner.RunInteractive(); + var iteratorType = RuntimeUtilities.IsCoreClr9OrHigherRuntime + ? "ArrayWhereSelectIterator" + : "WhereSelectArrayIterator"; AssertEx.AssertEqualToleratingWhitespaceDifferences( $@"{LogoAndHelpPrompt} > async Task GetStuffAsync() @@ -133,7 +136,7 @@ select x * x > from x in await GetStuffAsync() . where x > 2 . select x * x -Enumerable.WhereSelectArrayIterator {{ 9, 16, 25 }} +Enumerable.{iteratorType} {{ 9, 16, 25 }} > ", runner.Console.Out.ToString()); AssertEx.AssertEqualToleratingWhitespaceDifferences( diff --git a/src/Scripting/CSharpTest/Microsoft.CodeAnalysis.CSharp.Scripting.UnitTests.csproj b/src/Scripting/CSharpTest/Microsoft.CodeAnalysis.CSharp.Scripting.UnitTests.csproj index 5e8ced49b9982..3c7a527a29e20 100644 --- a/src/Scripting/CSharpTest/Microsoft.CodeAnalysis.CSharp.Scripting.UnitTests.csproj +++ b/src/Scripting/CSharpTest/Microsoft.CodeAnalysis.CSharp.Scripting.UnitTests.csproj @@ -5,7 +5,7 @@ Library Microsoft.CodeAnalysis.CSharp.Scripting.UnitTests true - $(NetRoslyn);net472 + $(NetRoslynNext);net472 diff --git a/src/Scripting/CSharpTest/ObjectFormatterTests.cs b/src/Scripting/CSharpTest/ObjectFormatterTests.cs index b4ce5c4f787f5..07201a7ada581 100644 --- a/src/Scripting/CSharpTest/ObjectFormatterTests.cs +++ b/src/Scripting/CSharpTest/ObjectFormatterTests.cs @@ -17,6 +17,7 @@ using Microsoft.CodeAnalysis.Scripting; using Microsoft.CodeAnalysis.Scripting.Hosting; using Microsoft.CodeAnalysis.Scripting.Hosting.UnitTests; +using Microsoft.CodeAnalysis.Test.Utilities; using ObjectFormatterFixtures; using Roslyn.Test.Utilities; using Xunit; @@ -507,7 +508,10 @@ public void DebuggerProxy_FrameworkTypes_IEnumerable_Exception() return i < 7; }); str = s_formatter.FormatObject(obj, SingleLineOptions); - Assert.Equal("Enumerable.WhereEnumerableIterator { 0, 1, 2, 3, 4, ! ... }", str); + var iteratorType = RuntimeUtilities.IsCoreClr9OrHigherRuntime + ? "IEnumerableWhereIterator" + : "WhereEnumerableIterator"; + Assert.Equal($"Enumerable.{iteratorType} {{ 0, 1, 2, 3, 4, ! ... }}", str); } [Fact] diff --git a/src/Tools/BuildBoss/ProjectCheckerUtil.cs b/src/Tools/BuildBoss/ProjectCheckerUtil.cs index 30169fa97809f..f42d30b8e6ace 100644 --- a/src/Tools/BuildBoss/ProjectCheckerUtil.cs +++ b/src/Tools/BuildBoss/ProjectCheckerUtil.cs @@ -247,6 +247,7 @@ private bool CheckTargetFrameworks(TextWriter textWriter) case "net472": case "netstandard2.0": case "$(NetRoslyn)": + case "$(NetRoslynNext)": case "$(NetRoslynSourceBuild)": case "$(NetRoslynToolset)": case "$(NetRoslynAll)":